Skip to content

Commit 9355a8e

Browse files
committed
Create pytorchl_freezeLayers.py
1 parent a308fc2 commit 9355a8e

File tree

1 file changed

+16
-0
lines changed

1 file changed

+16
-0
lines changed
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
"""Example on how to freeze certain layers while training (in pytorch lightning)
2+
Author: Mohit Mayank
3+
"""
4+
5+
# Before defining the optimizer, we need to freeze the layers
6+
# In pytorch lightning, as optimizer is defined in configure_optimizers, we freeze layers there.
7+
def configure_optimizers(self):
    """Build the optimizer, freezing every BERT sub-module parameter first.

    In PyTorch Lightning the optimizer is created in ``configure_optimizers``,
    so layer freezing is done here, before the optimizer is constructed.

    Iterates over all named parameters of this module and disables gradients
    for any parameter whose name contains ``'BERTModel'`` (parameter names
    derive from the attribute name the sub-module was assigned to), then
    creates an Adam optimizer over only the still-trainable parameters.

    Returns:
        torch.optim.Adam: optimizer over the non-frozen parameters, lr=1e-3.
    """
    # Freeze by name: the substring match depends on the variable name used
    # when the BERT sub-module was attached to this module.
    for name, param in self.named_parameters():
        if 'BERTModel' in name:
            param.requires_grad = False
    # Only pass the non-frozen parameters to the optimizer.
    # BUGFIX: the original referenced an undefined global `model`; inside a
    # LightningModule method the owning module is `self`.
    optimizer = torch.optim.Adam(
        filter(lambda p: p.requires_grad, self.parameters()), lr=1e-3
    )
    return optimizer

0 commit comments

Comments
 (0)