@@ -201,13 +201,13 @@ def _forward_fast_attention(self, inputs):
201201 # Connections for P6_0 and P7_0 to P6_1 respectively
202202 p6_up = self .conv6_up (self .swish (weight [0 ] * p6_in + weight [1 ] * self .p6_upsample (p7_in )))
203203
204- # Weights for P5_0 and P6_0 to P5_1
204+ # Weights for P5_0 and P6_1 to P5_1
205205 p5_w1 = self .p5_w1_relu (self .p5_w1 )
206206 weight = p5_w1 / (torch .sum (p5_w1 , dim = 0 ) + self .epsilon )
207207 # Connections for P5_0 and P6_1 to P5_1 respectively
208208 p5_up = self .conv5_up (self .swish (weight [0 ] * p5_in + weight [1 ] * self .p5_upsample (p6_up )))
209209
210- # Weights for P4_0 and P5_0 to P4_1
210+ # Weights for P4_0 and P5_1 to P4_1
211211 p4_w1 = self .p4_w1_relu (self .p4_w1 )
212212 weight = p4_w1 / (torch .sum (p4_w1 , dim = 0 ) + self .epsilon )
213213 # Connections for P4_0 and P5_1 to P4_1 respectively
@@ -272,10 +272,10 @@ def _forward(self, inputs):
272272 # Connections for P6_0 and P7_0 to P6_1 respectively
273273 p6_up = self .conv6_up (self .swish (p6_in + self .p6_upsample (p7_in )))
274274
275- # Connections for P5_0 and P6_0 to P5_1 respectively
275+ # Connections for P5_0 and P6_1 to P5_1 respectively
276276 p5_up = self .conv5_up (self .swish (p5_in + self .p5_upsample (p6_up )))
277277
278- # Connections for P4_0 and P5_0 to P4_1 respectively
278+ # Connections for P4_0 and P5_1 to P4_1 respectively
279279 p4_up = self .conv4_up (self .swish (p4_in + self .p4_upsample (p5_up )))
280280
281281 # Connections for P3_0 and P4_1 to P3_2 respectively
0 commit comments