change behavior of finetuning argument
zhangir-azerbayev committed Oct 24, 2023
Parent 10fc028 · commit 123c356
Showing 1 changed file with 4 additions and 6 deletions: megatron/checkpointing.py
@@ -351,20 +351,18 @@ def load_checkpoint(
 ):
     """Load a model checkpoint and return the iteration."""
     if neox_args.deepspeed:
-        load_optim_and_scheduler = (
+        load_optim = (
             not neox_args.no_load_optim
         )  # TODO: These should be configured by separate args
-        if neox_args.finetune:
-            load_optim_and_scheduler = False
         if iteration is not None:
             tag = get_checkpoint_tag(iteration)
         else:
             tag = None
         checkpoint_name, state_dict = model.load_checkpoint(
             neox_args.load,
-            load_optimizer_states=load_optim_and_scheduler,
-            load_lr_scheduler_states=load_optim_and_scheduler,
-            load_module_only=not load_optim_and_scheduler,
+            load_optimizer_states=load_optim,
+            load_lr_scheduler_states=False,
+            load_module_only=not load_optim,
             tag=tag,
         )

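In short, the commit decouples checkpoint loading from the finetune flag within load_checkpoint: optimizer states are skipped only when no_load_optim is set, and LR-scheduler states are never restored through this path. A minimal sketch of the resulting flag semantics follows; resolve_load_flags and the SimpleNamespace stand-in for neox_args are illustrative helpers, not code from the repository.

    from types import SimpleNamespace

    def resolve_load_flags(neox_args):
        # Post-commit logic: optimizer loading is governed solely by
        # no_load_optim; LR-scheduler states are never restored on this path.
        load_optim = not neox_args.no_load_optim
        return dict(
            load_optimizer_states=load_optim,
            load_lr_scheduler_states=False,  # unconditionally skipped after this commit
            load_module_only=not load_optim,
        )

    # Previously, finetune=True forced module-only loading (no optimizer,
    # no scheduler); after this commit a finetuning run keeps optimizer
    # states unless no_load_optim is set explicitly.
    args = SimpleNamespace(no_load_optim=False, finetune=True)
    print(resolve_load_flags(args))
    # -> {'load_optimizer_states': True, 'load_lr_scheduler_states': False, 'load_module_only': False}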
