You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Copy file name to clipboardExpand all lines: torchvision/ops/misc.py
+6-6Lines changed: 6 additions & 6 deletions
Original file line number
Diff line number
Diff line change
@@ -119,14 +119,14 @@ def __init__(
119
119
120
120
classConv2dNormActivation(ConvNormActivation):
121
121
"""
122
-
Configurable block used for Convolution2d-Normalzation-Activation blocks.
122
+
Configurable block used for Convolution2d-Normalization-Activation blocks.
123
123
124
124
Args:
125
125
in_channels (int): Number of channels in the input image
126
-
out_channels (int): Number of channels produced by the Convolution-Normalzation-Activation block
126
+
out_channels (int): Number of channels produced by the Convolution-Normalization-Activation block
127
127
kernel_size (int, optional): Size of the convolving kernel. Default: 3
128
128
stride (int, optional): Stride of the convolution. Default: 1
129
-
padding (int, tuple or str, optional): Padding added to all four sides of the input. Default: None, in wich case it will calculated as ``padding = (kernel_size - 1) // 2 * dilation``
129
+
padding (int, tuple or str, optional): Padding added to all four sides of the input. Default: None, in which case it will be calculated as ``padding = (kernel_size - 1) // 2 * dilation``
130
130
groups (int, optional): Number of blocked connections from input channels to output channels. Default: 1
131
131
norm_layer (Callable[..., torch.nn.Module], optional): Norm layer that will be stacked on top of the convolution layer. If ``None`` this layer won't be used. Default: ``torch.nn.BatchNorm2d``
132
132
activation_layer (Callable[..., torch.nn.Module], optional): Activation function which will be stacked on top of the normalization layer (if not None), otherwise on top of the conv layer. If ``None`` this layer won't be used. Default: ``torch.nn.ReLU``
@@ -169,14 +169,14 @@ def __init__(
169
169
170
170
classConv3dNormActivation(ConvNormActivation):
171
171
"""
172
-
Configurable block used for Convolution3d-Normalzation-Activation blocks.
172
+
Configurable block used for Convolution3d-Normalization-Activation blocks.
173
173
174
174
Args:
175
175
in_channels (int): Number of channels in the input video.
176
-
out_channels (int): Number of channels produced by the Convolution-Normalzation-Activation block
176
+
out_channels (int): Number of channels produced by the Convolution-Normalization-Activation block
177
177
kernel_size (int, optional): Size of the convolving kernel. Default: 3
178
178
stride (int, optional): Stride of the convolution. Default: 1
179
-
padding (int, tuple or str, optional): Padding added to all four sides of the input. Default: None, in wich case it will calculated as ``padding = (kernel_size - 1) // 2 * dilation``
179
+
padding (int, tuple or str, optional): Padding added to all four sides of the input. Default: None, in which case it will be calculated as ``padding = (kernel_size - 1) // 2 * dilation``
180
180
groups (int, optional): Number of blocked connections from input channels to output channels. Default: 1
181
181
norm_layer (Callable[..., torch.nn.Module], optional): Norm layer that will be stacked on top of the convolution layer. If ``None`` this layer won't be used. Default: ``torch.nn.BatchNorm3d``
182
182
activation_layer (Callable[..., torch.nn.Module], optional): Activation function which will be stacked on top of the normalization layer (if not None), otherwise on top of the conv layer. If ``None`` this layer won't be used. Default: ``torch.nn.ReLU``
0 commit comments