@@ -59,27 +59,6 @@ def forward(self, x):
         return self.outconv(torch.cat([branch1, branch2, branch3, branch4, branch5], dim=1))
 
 
-class ResidualBlock(nn.Module):
-    def __init__(self, in_channels: int, out_channels: int):
-        super(ResidualBlock, self).__init__()
-        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1)
-        self.relu1 = nn.ReLU(inplace=True)
-        self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1)
-        self.relu2 = nn.ReLU(inplace=True)
-
-    def forward(self, x):
-        identity = x
-
-        out = self.conv1(x)
-        out = self.relu1(out)
-        out = self.conv2(out)
-
-        out += identity
-        out = self.relu2(out)
-
-        return out
-
-
 class Proposed(nn.Module):
     def __init__(self, num_classes: int):
         super(Proposed, self).__init__()
@@ -121,14 +100,6 @@ def double_conv(self, in_channels: int, out_channels: int, batch_norm=True):
             nn.ReLU(inplace=True)
         )
 
-    def make_layer(self, in_channels, out_channels, num_blocks):
-        layers = [nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1)]
-
-        for _ in range(num_blocks):
-            layers.append(ResidualBlock(out_channels, out_channels))
-
-        return nn.Sequential(*layers)
-
     def forward(self, x):
         # Encoder
         encode1 = self.encode1(self.initial_conv(x))
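For context on what this diff removes: the deleted ResidualBlock adds the raw input back with `out += identity`, which only type-checks when in_channels == out_channels; the deleted make_layer guaranteed that by running an initial 3x3 conv and then constructing every block as ResidualBlock(out_channels, out_channels). A minimal sketch of the more general residual pattern, with an optional 1x1 projection on the shortcut so channel mismatches also work (hypothetical code, not part of this repository):

import torch.nn as nn

class ProjectedResidualBlock(nn.Module):
    """Hypothetical variant of the removed block: a 1x1 projection
    aligns channels so the skip connection is valid even when
    in_channels != out_channels."""

    def __init__(self, in_channels: int, out_channels: int):
        super().__init__()
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1)
        self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1)
        self.relu = nn.ReLU(inplace=True)
        # Identity shortcut when shapes already match, else a learned projection.
        self.shortcut = (nn.Identity() if in_channels == out_channels
                         else nn.Conv2d(in_channels, out_channels, kernel_size=1))

    def forward(self, x):
        out = self.conv2(self.relu(self.conv1(x)))
        # Add the (possibly projected) input, then apply the final ReLU,
        # mirroring the order used in the removed block.
        return self.relu(out + self.shortcut(x))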