Fix manual optimization in pl_example (#6373)
* Fix automatic_optimization
* Uncomment fairscale
akihironitta authored Mar 6, 2021
1 parent facfda8 commit 34b733b
Showing 1 changed file with 6 additions and 7 deletions.

pl_examples/basic_examples/conv_sequential_example.py
@@ -102,8 +102,9 @@ def __init__(self, lr=0.05, batch_size=32, manual_optimization=False):
             nn.Linear(512, 10)
         )
         self._example_input_array = torch.randn((1, 3, 32, 32))
-        self._manual_optimization = manual_optimization
-        if self._manual_optimization:
+
+        if manual_optimization:
+            self.automatic_optimization = False
             self.training_step = self.training_step_manual
 
     def forward(self, x):
@@ -165,10 +166,6 @@ def configure_optimizers(self):
             }
         }
 
-    @property
-    def automatic_optimization(self) -> bool:
-        return not self._manual_optimization
-
 
 #################################
 # Instantiate Data Module      #
@@ -189,6 +186,7 @@ def instantiate_datamodule(args):
     ])
 
     cifar10_dm = pl_bolts.datamodules.CIFAR10DataModule(
+        data_dir=args.data_dir,
         batch_size=args.batch_size,
         train_transforms=train_transforms,
         test_transforms=test_transforms,
@@ -206,6 +204,7 @@ def instantiate_datamodule(args):
 
     parser = ArgumentParser(description="Pipe Example")
     parser.add_argument("--use_rpc_sequential", action="store_true")
+    parser.add_argument("--manual_optimization", action="store_true")
     parser = Trainer.add_argparse_args(parser)
     parser = pl_bolts.datamodules.CIFAR10DataModule.add_argparse_args(parser)
     args = parser.parse_args()
@@ -216,7 +215,7 @@ def instantiate_datamodule(args):
     if args.use_rpc_sequential:
         plugins = RPCSequentialPlugin()
 
-    model = LitResnet(batch_size=args.batch_size, manual_optimization=not args.automatic_optimization)
+    model = LitResnet(batch_size=args.batch_size, manual_optimization=args.manual_optimization)
 
     trainer = pl.Trainer.from_argparse_args(args, plugins=[plugins] if plugins else None)
     trainer.fit(model, cifar10_dm)

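For context, the pattern the commit adopts, setting self.automatic_optimization = False as an attribute in __init__ rather than overriding it as a read-only property, is sketched below in a minimal, self-contained LightningModule. The module, data shapes, and optimizer are illustrative placeholders, not part of the commit; the manual step uses the standard self.optimizers() and self.manual_backward() calls from the PyTorch Lightning manual-optimization API.

    import torch
    import torch.nn as nn
    import torch.nn.functional as F
    import pytorch_lightning as pl


    class TinyModule(pl.LightningModule):
        # Illustrative stand-in for LitResnet: same toggle, much smaller model.
        def __init__(self, manual_optimization: bool = False):
            super().__init__()
            self.layer = nn.Linear(32, 2)
            if manual_optimization:
                # `automatic_optimization` is a settable attribute, so the flag
                # can be flipped in __init__ instead of being shadowed by the
                # read-only property that this commit removes.
                self.automatic_optimization = False
                self.training_step = self.training_step_manual

        def training_step(self, batch, batch_idx):
            x, y = batch
            return F.cross_entropy(self.layer(x), y)

        def training_step_manual(self, batch, batch_idx):
            # Manual optimization: fetch the optimizer and drive it explicitly.
            x, y = batch
            loss = F.cross_entropy(self.layer(x), y)
            opt = self.optimizers()
            opt.zero_grad()
            self.manual_backward(loss)
            opt.step()

        def configure_optimizers(self):
            return torch.optim.SGD(self.parameters(), lr=0.05)

With the new command-line flag in place, the example would be launched with something like python pl_examples/basic_examples/conv_sequential_example.py --use_rpc_sequential --manual_optimization.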