crumb committed on
Commit
3781035
·
1 Parent(s): 7c0e4c1

Update modeling_switchgpt2.py

Browse files
Files changed (1) hide show
  1. modeling_switchgpt2.py +16 -16
modeling_switchgpt2.py CHANGED
@@ -171,22 +171,22 @@ class Attention(nn.Module):
171
  outputs = [a, present] + attn_outputs[1:]
172
  return outputs # a, present, (attentions)
173
 
174
-
175
- class MLP(nn.Module):
176
- def __init__(self, n_state, config): # in MLP: n_state=3072 (4 * n_embd)
177
- super().__init__()
178
- nx = config.n_embd
179
- # self.c_fc = Conv1D(n_state, nx)
180
- # self.c_proj = Conv1D(nx, n_state)
181
- self.c_fc = nn.Linear(nx, n_state)
182
- self.c_proj = nn.Linear(n_state, nx)
183
- self.act = ACT2FN[config.activation_function]
184
- self.dropout = nn.Dropout(config.resid_pdrop)
185
-
186
- def forward(self, x):
187
- h = self.act(self.c_fc(x))
188
- h2 = self.c_proj(h)
189
- return self.dropout(h2)
190
 
191
 
192
  class Block(nn.Module):
 
171
  outputs = [a, present] + attn_outputs[1:]
172
  return outputs # a, present, (attentions)
173
 
174
+ # the old MLP class
175
+ # class MLP(nn.Module):
176
+ # def __init__(self, n_state, config): # in MLP: n_state=3072 (4 * n_embd)
177
+ # super().__init__()
178
+ # nx = config.n_embd
179
+ # # self.c_fc = Conv1D(n_state, nx)
180
+ # # self.c_proj = Conv1D(nx, n_state)
181
+ # self.c_fc = nn.Linear(nx, n_state)
182
+ # self.c_proj = nn.Linear(n_state, nx)
183
+ # self.act = ACT2FN[config.activation_function]
184
+ # self.dropout = nn.Dropout(config.resid_pdrop)
185
+
186
+ # def forward(self, x):
187
+ # h = self.act(self.c_fc(x))
188
+ # h2 = self.c_proj(h)
189
+ # return self.dropout(h2)
190
 
191
 
192
  class Block(nn.Module):