Commit: transformers.Conv1D
plutonium-239 committed Apr 16, 2024
1 parent 4df05a3 commit 6b34d79
Showing 1 changed file with 3 additions and 2 deletions.
experiments/util/measurements.py (5 changes: 3 additions & 2 deletions)
--- a/experiments/util/measurements.py
+++ b/experiments/util/measurements.py
@@ -26,6 +26,7 @@
     Parameter,
     Embedding
 )
+from transformers import Conv1D
 from torchvision.models.convnext import LayerNorm2d
 
 from memsave_torch.nn.Conv2d import MemSaveConv2d
@@ -322,7 +323,7 @@ def separate_grad_arguments(
     Raises:
         NotImplementedError: If an unknown layer with parameters is encountered.
     """
-    linear = (Linear, MemSaveLinear)
+    linear = (Linear, MemSaveLinear, Conv1D)
     conv = (
         Conv1d,
         Conv2d,
@@ -346,7 +347,7 @@ def separate_layer(layer: Module, grad_weight: bool, grad_bias: bool):
             grad_bias: Whether to compute the gradient of the layer bias.
         """
         leafs.append(layer.weight) if grad_weight else no_leafs.append(layer.weight)
-        if layer.bias is not None:
+        if 'bias' in layer._parameters and layer.bias is not None:
             leafs.append(layer.bias) if grad_bias else no_leafs.append(layer.bias)
 
     layers = [m for m in model.modules() if len(list(m.modules())) == 1]
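Background note (not part of the commit): transformers.Conv1D, used in GPT-2-style models, is effectively a fully connected layer whose weight is stored transposed relative to torch.nn.Linear, which is why the commit groups it with the linear classes. A minimal sketch of that equivalence, assuming the Conv1D(nf, nx) constructor of the transformers library:

import torch
from torch.nn import Linear
from transformers import Conv1D

x = torch.randn(2, 8)        # (batch, in_features)

conv1d = Conv1D(nf=4, nx=8)  # weight stored as (in_features, out_features)
linear = Linear(8, 4)        # weight stored as (out_features, in_features)

# Copy parameters so both modules compute the same affine map x @ W + b.
with torch.no_grad():
    linear.weight.copy_(conv1d.weight.T)
    linear.bias.copy_(conv1d.bias)

assert torch.allclose(conv1d(x), linear(x), atol=1e-6)

Because the forward pass is an ordinary affine map, treating Conv1D like Linear when separating weight and bias gradient arguments is consistent with how Linear and MemSaveLinear are already handled.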

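The second change guards the bias access. Some modules, for example torch.nn.Embedding (imported in the same file), register only a weight and define no bias attribute at all, so a bare layer.bias would raise AttributeError; checking the module's registered parameters first avoids that. A small illustration, assuming this is the motivation for the guard:

import torch.nn as nn

emb = nn.Embedding(10, 4)  # registers only a weight parameter
lin = nn.Linear(4, 4)      # registers weight and bias

for layer in (emb, lin):
    # Short-circuits before touching layer.bias, which would raise for Embedding.
    has_bias = 'bias' in layer._parameters and layer.bias is not None
    print(type(layer).__name__, 'has bias:', has_bias)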