GitHub Repository: ai-forever/sber-swap
Path: blob/main/apex/tests/L0/run_amp/test_larc.py
import unittest

import torch
from torch import nn
from torch.nn import Parameter

from apex import amp
from apex.parallel.LARC import LARC
from utils import common_init


class MyModel(torch.nn.Module):
    # Minimal model with one 2-element CUDA parameter; `unique` offsets the
    # initial weights so different instances start from distinct values.
    def __init__(self, unique):
        super(MyModel, self).__init__()
        self.weight0 = Parameter(
            unique + torch.arange(2, device="cuda", dtype=torch.float32)
        )

    def forward(self, input):
        return (input * self.weight0).sum()


class TestLARC(unittest.TestCase):
    def setUp(self):
        self.x = torch.ones((2), device="cuda", dtype=torch.float32)
        common_init(self)

    def tearDown(self):
        pass

    # Exercise one forward/backward/step cycle with LARC wrapping SGD under
    # every amp opt level, from pure fp32 (O0) to pure fp16 (O3).
    def test_larc_mixed_precision(self):
        for opt_level in ["O0", "O1", "O2", "O3"]:
            model = MyModel(1)

            optimizer = LARC(
                torch.optim.SGD(
                    [{"params": model.parameters(), "lr": 0.25}], momentum=0.125
                )
            )

            model, optimizer = amp.initialize(
                model, optimizer, opt_level=opt_level, verbosity=0
            )

            optimizer.zero_grad()
            loss = model(self.x)
            # amp scales the loss so fp16 gradients do not underflow, then
            # unscales them before the optimizer step.
            with amp.scale_loss(loss, optimizer) as scaled_loss:
                scaled_loss.backward()
            optimizer.step()


if __name__ == "__main__":
    unittest.main()
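For context, a minimal sketch of wrapping a PyTorch optimizer with LARC on its own, without amp or unittest. The Linear layer, tensor shapes, and the explicit trust_coefficient value here are illustrative assumptions, not part of the test above:

# Hedged sketch: LARC wraps a base optimizer and rescales each layer's
# effective learning rate by the "trust ratio" ||weights|| / ||gradients||.
# Assumes apex is installed and a CUDA device is available.
import torch
from apex.parallel.LARC import LARC

model = torch.nn.Linear(4, 1).cuda()
base_optimizer = torch.optim.SGD(model.parameters(), lr=0.25, momentum=0.125)
optimizer = LARC(base_optimizer, trust_coefficient=0.02)  # 0.02 is apex's default

x = torch.ones(4, device="cuda")
optimizer.zero_grad()
loss = model(x).sum()
loss.backward()
optimizer.step()  # LARC adjusts the per-layer rate, then delegates to SGD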