Skip to content

Commit 7b328a0

Browse files
committed Sep 20, 2019
init commit combined Adam
1 parent 844afd2 commit 7b328a0

File tree

1 file changed

+8
-3
lines changed

1 file changed

+8
-3
lines changed
 

‎network/oe_h.py

+8-3
Original file line number | Diff line number | Diff line change
@@ -1416,7 +1416,7 @@ def __init__(self, graph_dict, imageless_dataloaders, image_dir,
14161416

14171417
# set this graph to use for generating corrupt pairs on the fly
14181418
# so this graph should correspond to the graph: fully connected graph - tc graph
1419-
# for the train set
1419+
# for the train setf
14201420
self.criterion.set_negative_graph(self.graph_dict['G_train_neg'], self.graph_dict['mapping_node_to_ix'],
14211421
self.graph_dict['mapping_ix_to_node'])
14221422
#
@@ -1514,9 +1514,14 @@ def run_model(self, optimizer):
15141514
self.optimizer_labels = optim.SGD([{'params': self.model.parameters(), 'lr': 0.0}], momentum=0.0)
15151515
self.optimizer_images = optim.Adam([{'params': self.img_feat_net.parameters()}], lr=self.lr_images)
15161516
else:
1517+
params_to_update = [{'params': self.model.parameters()},
1518+
{'params': self.img_feat_net.parameters()}]
1519+
15171520
self.optimizer_labels = optim.Adam([{'params': self.model.parameters()}], lr=self.lr_labels)
15181521
self.optimizer_images = optim.Adam([{'params': self.img_feat_net.parameters()}], lr=self.lr_images)
15191522

1523+
self.optimizer_labels = optim.Adam([{'params': list(self.model.parameters()) + list(self.img_feat_net.parameters())}], lr=self.lr_labels)
1524+
15201525
self.scheduler_labels = torch.optim.lr_scheduler.MultiStepLR(self.optimizer_labels, milestones=self.lr_step, gamma=0.1)
15211526
self.scheduler_images = torch.optim.lr_scheduler.MultiStepLR(self.optimizer_images, milestones=self.lr_step, gamma=0.1)
15221527

@@ -1732,7 +1737,7 @@ def pass_samples(self, phase, save_to_tensorboard=True):
17321737

17331738
# zero the parameter gradients
17341739
self.optimizer_labels.zero_grad()
1735-
self.optimizer_images.zero_grad()
1740+
# self.optimizer_images.zero_grad()
17361741

17371742
# forward
17381743
# track history if only in train
@@ -1762,7 +1767,7 @@ def pass_samples(self, phase, save_to_tensorboard=True):
17621767
# convert euclidean gradients to riemannian gradients for the label embeddings
17631768
self.model.module.embeddings.weight.grad.data *= (1.0/self.lambda_x(self.model.module.embeddings.weight.data))**2
17641769
self.optimizer_labels.step()
1765-
self.optimizer_images.step()
1770+
# self.optimizer_images.step()
17661771
self.model.module.embeddings.weight.data = self.soft_clip(self.model.module.embeddings.weight.data)
17671772

17681773
# statistics

0 commit comments

Comments (0)