remove frame inspection on self.hparams #2253

Merged (6 commits) on Jun 19, 2020
19 changes: 18 additions & 1 deletion pytorch_lightning/core/lightning.py
@@ -1,6 +1,7 @@
 import collections
 import inspect
 import os
+import re
 from abc import ABC, abstractmethod
 from argparse import Namespace
 from typing import Any, Callable, Dict, List, Optional, Tuple, Union, Sequence
@@ -1692,4 +1693,20 @@ def hparams(self) -> Union[AttributeDict, str]:

     @hparams.setter
     def hparams(self, hp: Union[dict, Namespace, Any]):
-        self.save_hyperparameters(hp, frame=inspect.currentframe().f_back.f_back)
+        hparams_assignment_name = self.__get_hparams_assignment_variable()
+        self._hparams_name = hparams_assignment_name
+        self._set_hparams(hp)
+
+    def __get_hparams_assignment_variable(self):
+        """
+        looks at the code of the class to figure out what the user named self.hparams
+        this only happens when the user explicitly sets self.hparams
+        """
+        class_code = inspect.getsource(self.__class__)
+        lines = class_code.split('\n')
+        for line in lines:
+            line = re.sub(r"\s+", "", line, flags=re.UNICODE)
+            if 'self.hparams=' in line:
@Borda (Member) commented on Jun 19, 2020:
This is a naive hack that assumes the user sticks to standard naming; it will fail on:

class AAA(...):
    def __init__(me, param1, ...):
         me.hparams = param1

Contributor Author replied:
nope... 'hparams' is a fallback... try it!

@Borda (Member):
The point is when the user renames self to me, or anything else.

@Borda (Member):
Also, what about spacing:

self.hparams                <50x spaces> = param1

or renaming the variable:

class AAA():
  def __init__(me, param1):
    copy_param = copy.deepcopy(param1)
    me.hparams = copy_param

+                return line.split('=')[1]
+
+        return None
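
As a side note, here is a minimal standalone sketch of the parsing logic added above, run against the cases raised in the review thread. The helper name find_hparams_assignment and the toy class bodies are invented for illustration; only the regex/split logic is taken from the diff:

import re
import textwrap

def find_hparams_assignment(class_code: str):
    # same idea as __get_hparams_assignment_variable above: strip every
    # whitespace run, look for 'self.hparams=', and return whatever sits
    # to the right of the '='
    for line in class_code.split('\n'):
        line = re.sub(r"\s+", "", line, flags=re.UNICODE)
        if 'self.hparams=' in line:
            return line.split('=')[1]
    return None

# standard naming: the assigned variable name is recovered
standard = textwrap.dedent("""
    class LitModel:
        def __init__(self, conf):
            self.hparams = conf
""")
print(find_hparams_assignment(standard))   # 'conf'

# heavy spacing is tolerated because all whitespace is stripped before matching
spaced = "class LitModel:\n    def __init__(self, param1):\n        self.hparams            = param1\n"
print(find_hparams_assignment(spaced))     # 'param1'

# renaming self to me defeats the plain string match, as the review points out
renamed = "class AAA:\n    def __init__(me, param1):\n        me.hparams = param1\n"
print(find_hparams_assignment(renamed))    # None

The last review example combines both issues: with me in place of self the lookup falls through to None, and even with self it would recover the local alias copy_param rather than the original constructor argument.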
4 changes: 4 additions & 0 deletions pytorch_lightning/core/saving.py
@@ -176,14 +176,18 @@ def _load_model_state(cls, checkpoint: Dict[str, Any], *args, **kwargs):
        # pass in the values we saved automatically
        if cls.CHECKPOINT_HYPER_PARAMS_KEY in checkpoint:
            model_args = {}

            # add some back compatibility, the actual one shall be last
            for hparam_key in CHECKPOINT_PAST_HPARAMS_KEYS + (cls.CHECKPOINT_HYPER_PARAMS_KEY,):
                if hparam_key in checkpoint:
                    model_args.update(checkpoint[hparam_key])

            if cls.CHECKPOINT_HYPER_PARAMS_TYPE in checkpoint:
                model_args = checkpoint[cls.CHECKPOINT_HYPER_PARAMS_TYPE](model_args)

            args_name = checkpoint.get(cls.CHECKPOINT_HYPER_PARAMS_NAME)
            init_args_name = inspect.signature(cls).parameters.keys()

            if args_name == 'kwargs':
                cls_kwargs = {k: v for k, v in model_args.items() if k in init_args_name}
                kwargs.update(**cls_kwargs)
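
A quick sketch of the merge order used in the saving.py hunk above: legacy hyperparameter keys are applied first and the current key last, so the newest entry wins on conflicts. The key names and the checkpoint dict below are assumed for illustration; in the real code they come from the module constants and the loaded checkpoint:

# assumed key names, for illustration only
CHECKPOINT_PAST_HPARAMS_KEYS = ('hparams', 'module_arguments')
CHECKPOINT_HYPER_PARAMS_KEY = 'hyper_parameters'

# a fake checkpoint carrying both an old-style and a current hparams entry
checkpoint = {
    'hparams': {'lr': 0.1, 'batch_size': 32},
    'hyper_parameters': {'lr': 0.01},
}

model_args = {}
# legacy keys first, the current key last, so the newest value overrides on conflict
for hparam_key in CHECKPOINT_PAST_HPARAMS_KEYS + (CHECKPOINT_HYPER_PARAMS_KEY,):
    if hparam_key in checkpoint:
        model_args.update(checkpoint[hparam_key])

print(model_args)   # {'lr': 0.01, 'batch_size': 32}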
2 changes: 1 addition & 1 deletion tests/models/test_hparams.py
@@ -39,7 +39,7 @@ def _run_standard_hparams_test(tmpdir, model, cls, try_overwrite=False):
     assert model.hparams.test_arg == 14

     # verify we can train
-    trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, overfit_batches=0.5)
+    trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, overfit_batches=2)
     trainer.fit(model)

     # make sure the raw checkpoint saved the properties
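
Regarding the test change: overfit_batches takes either a float (a fraction of the training data) or an int (a fixed number of batches), so moving from 0.5 to 2 pins the smoke test to exactly two batches rather than half the dataset. A rough usage sketch, assuming the Trainer API of that release:

from pytorch_lightning import Trainer

trainer = Trainer(max_epochs=1, overfit_batches=0.5)   # overfit on 50% of the training data
trainer = Trainer(max_epochs=1, overfit_batches=2)     # overfit on exactly 2 batches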