jbilcke-hf HF Staff commited on
Commit
b701d97
·
1 Parent(s): c81c2d8

didn't work, skipping: comment out the `torch.compiler.allow_in_graph(...)` calls in fused_act.py and upfirdn2d.py

Browse files
networks/op/fused_act.py CHANGED
@@ -128,4 +128,4 @@ def fused_leaky_relu(input, bias=None, negative_slope=0.2, scale=2 ** 0.5):
128
  input.contiguous(), bias, negative_slope, scale
129
  )
130
 
131
- torch.compiler.allow_in_graph(fused.fused_bias_act)
 
128
  input.contiguous(), bias, negative_slope, scale
129
  )
130
 
131
+ #torch.compiler.allow_in_graph(fused.fused_bias_act)
networks/op/upfirdn2d.py CHANGED
@@ -211,4 +211,4 @@ def upfirdn2d_native(
211
 
212
  return out.view(-1, channel, out_h, out_w)
213
 
214
- torch.compiler.allow_in_graph(upfirdn2d_op.upfirdn2d)
 
211
 
212
  return out.view(-1, channel, out_h, out_w)
213
 
214
+ #torch.compiler.allow_in_graph(upfirdn2d_op.upfirdn2d)