From 9b7573b626440635f22a5d0c0b55bb4fd5e822b6 Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Thu, 2 Apr 2020 18:38:59 -0400
Subject: [PATCH 01/14] quick patch

---
 pytorch_lightning/trainer/trainer.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 005512d213d74..8f6205409c4c4 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -963,8 +963,5 @@ class _PatchDataLoader(object):
     def __init__(self, dataloader: Union[List[DataLoader], DataLoader]):
         self.dataloader = dataloader
 
-        # Assign __code__, needed for checking if method has been overriden
-        self.__code__ = self.__call__.__code__
-
     def __call__(self) -> Union[List[DataLoader], DataLoader]:
         return self.dataloader

From 72fb5a49df24445974cd9d9c0165b41a54d4211b Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Thu, 2 Apr 2020 21:46:59 -0400
Subject: [PATCH 02/14] testing fix

---
 pytorch_lightning/trainer/model_hooks.py | 5 ++++-
 pytorch_lightning/trainer/trainer.py | 2 ++
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/model_hooks.py b/pytorch_lightning/trainer/model_hooks.py
index 177aff1faaf03..3a706b781c6bf 100644
--- a/pytorch_lightning/trainer/model_hooks.py
+++ b/pytorch_lightning/trainer/model_hooks.py
@@ -20,8 +20,11 @@ def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
             # in case of calling deprecated method
             return False
 
+        instance_attr = getattr(model, method_name)
+        super_attr = getattr(super_object, method_name)
+
         # when code pointers are different, it was overriden
-        is_overriden = getattr(model, method_name).__code__ is not getattr(super_object, method_name).__code__
+        is_overriden = str(instance_attr.__code__) is not str(super_attr.__code__)
         return is_overriden
 
     def has_arg(self, f_name, arg_name):

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 8f6205409c4c4..003b58c0481ca 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -963,5 +963,7 @@ class _PatchDataLoader(object):
     def __init__(self, dataloader: Union[List[DataLoader], DataLoader]):
         self.dataloader = dataloader
 
+        self.code = str(self.__call__.__code__)
+
     def __call__(self) -> Union[List[DataLoader], DataLoader]:
         return self.dataloader
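A note on the comparison PATCH 02 introduces: `is not` tests object identity, and `str()` builds a brand-new string on every call, so `str(instance_attr.__code__) is not str(super_attr.__code__)` evaluates to True even when the two strings are character-for-character equal, meaning every hook would be reported as overridden. A minimal standalone illustration of the pitfall (not part of the patch):

    def base():
        return None

    def child():
        return None

    # `is` / `is not` compare identity, not contents
    print(str(base.__code__) is not str(base.__code__))  # True: two distinct str objects
    print(str(base.__code__) != str(base.__code__))      # False: equal text
    print(base.__code__ is not base.__code__)            # False: same code object
    print(base.__code__ is not child.__code__)           # True: genuinely different functions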
From dd6d860ef0496a477788b682dbc8caf610b0f493 Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Thu, 2 Apr 2020 21:47:16 -0400
Subject: [PATCH 03/14] testing fix

---
 pytorch_lightning/trainer/trainer.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 003b58c0481ca..28c36e8db9c9c 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -902,6 +902,7 @@ def check_model_configuration(self, model: LightningModule):
 
         """
         # Check training_step, train_dataloader, configure_optimizer methods
+        import pdb; pdb.set_trace()
         if not self.is_overriden('training_step', model):
             raise MisconfigurationException(
                 'No `training_step()` method defined. Lightning `Trainer` expects as minimum a'

From d68b89d12e4b1e43ab03bb2654c22adfe572b8ee Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Thu, 2 Apr 2020 21:55:25 -0400
Subject: [PATCH 04/14] testing fix

---
 pytorch_lightning/trainer/model_hooks.py | 2 +-
 pytorch_lightning/trainer/trainer.py | 3 +--
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/pytorch_lightning/trainer/model_hooks.py b/pytorch_lightning/trainer/model_hooks.py
index 3a706b781c6bf..661cdf8d4dca3 100644
--- a/pytorch_lightning/trainer/model_hooks.py
+++ b/pytorch_lightning/trainer/model_hooks.py
@@ -24,7 +24,7 @@ def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
         super_attr = getattr(super_object, method_name)
 
         # when code pointers are different, it was overriden
-        is_overriden = str(instance_attr.__code__) is not str(super_attr.__code__)
+        is_overriden = instance_attr.__code__.hash() is not super_attr.__code__.hash()
         return is_overriden
 
     def has_arg(self, f_name, arg_name):

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 28c36e8db9c9c..1a7626a040ac8 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -902,7 +902,6 @@ def check_model_configuration(self, model: LightningModule):
 
         """
         # Check training_step, train_dataloader, configure_optimizer methods
-        import pdb; pdb.set_trace()
         if not self.is_overriden('training_step', model):
             raise MisconfigurationException(
                 'No `training_step()` method defined. Lightning `Trainer` expects as minimum a'
@@ -964,7 +963,7 @@ class _PatchDataLoader(object):
     def __init__(self, dataloader: Union[List[DataLoader], DataLoader]):
         self.dataloader = dataloader
 
-        self.code = str(self.__call__.__code__)
+        self.code = self.__call__.__code__.hash()
 
     def __call__(self) -> Union[List[DataLoader], DataLoader]:
         return self.dataloader
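A note on PATCH 04 (and PATCH 05 after it, which applies the same spelling to strings): neither code objects nor strings expose a `.hash()` method, so `instance_attr.__code__.hash()` raises `AttributeError` before any comparison runs; hashing in Python goes through the built-in `hash()` or `__hash__()`. A quick check, separate from the patch:

    def f():
        return None

    try:
        f.__code__.hash()
    except AttributeError as err:
        print(err)  # 'code' object has no attribute 'hash'

    print(hash(f.__code__))                           # the built-in spelling works
    print(f.__code__.__hash__() == hash(f.__code__))  # True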
From fdfd5bcd55a3b58cec5bd1dfa212eb342b578260 Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Thu, 2 Apr 2020 21:56:20 -0400
Subject: [PATCH 05/14] testing fix

---
 pytorch_lightning/trainer/model_hooks.py | 2 +-
 pytorch_lightning/trainer/trainer.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pytorch_lightning/trainer/model_hooks.py b/pytorch_lightning/trainer/model_hooks.py
index 661cdf8d4dca3..b199f6990f125 100644
--- a/pytorch_lightning/trainer/model_hooks.py
+++ b/pytorch_lightning/trainer/model_hooks.py
@@ -24,7 +24,7 @@ def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
         super_attr = getattr(super_object, method_name)
 
         # when code pointers are different, it was overriden
-        is_overriden = instance_attr.__code__.hash() is not super_attr.__code__.hash()
+        is_overriden = str(instance_attr.__code__).hash() is not str(super_attr.__code__).hash()
         return is_overriden
 
     def has_arg(self, f_name, arg_name):

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 1a7626a040ac8..70d510a84cd32 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -963,7 +963,7 @@ class _PatchDataLoader(object):
     def __init__(self, dataloader: Union[List[DataLoader], DataLoader]):
         self.dataloader = dataloader
 
-        self.code = self.__call__.__code__.hash()
+        self.code = str(self.__call__.__code__).hash()
 
     def __call__(self) -> Union[List[DataLoader], DataLoader]:
         return self.dataloader

From 1a9827a4f49c31a7353406959af2b444fbf1288a Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Thu, 2 Apr 2020 21:57:26 -0400
Subject: [PATCH 06/14] testing fix

---
 pytorch_lightning/trainer/model_hooks.py | 2 +-
 pytorch_lightning/trainer/trainer.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pytorch_lightning/trainer/model_hooks.py b/pytorch_lightning/trainer/model_hooks.py
index b199f6990f125..c99d3c8d325b7 100644
--- a/pytorch_lightning/trainer/model_hooks.py
+++ b/pytorch_lightning/trainer/model_hooks.py
@@ -24,7 +24,7 @@ def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
         super_attr = getattr(super_object, method_name)
 
         # when code pointers are different, it was overriden
-        is_overriden = str(instance_attr.__code__).hash() is not str(super_attr.__code__).hash()
+        is_overriden = hash(instance_attr.__code__) is not hash(super_attr.__code__)
         return is_overriden
 
     def has_arg(self, f_name, arg_name):

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 70d510a84cd32..790b2c178cf23 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -963,7 +963,7 @@ class _PatchDataLoader(object):
     def __init__(self, dataloader: Union[List[DataLoader], DataLoader]):
         self.dataloader = dataloader
 
-        self.code = str(self.__call__.__code__).hash()
+        self.code = hash(self.__call__.__code__)
 
     def __call__(self) -> Union[List[DataLoader], DataLoader]:
         return self.dataloader
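A note on PATCH 06: switching to the built-in `hash()` removes the AttributeError, but the results are plain ints that are still compared with `is not`. CPython only caches small integers, so two equal hash values are usually distinct int objects and the identity test keeps reporting hooks as overridden; `!=` (or comparing the code objects themselves, which PATCH 07 restores) is the meaningful test. Illustration, outside the patch:

    def f():
        return None

    a = hash(f.__code__)
    b = hash(f.__code__)

    print(a == b)      # True: equal values
    print(a is not b)  # usually True in CPython: two separate int objects

    x, y = 7, 7
    print(x is y)      # True: small ints are cached, which can mask the problem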
From ec9cc34edc52238319170c3283a5f24422d395dd Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Fri, 3 Apr 2020 07:03:34 -0400
Subject: [PATCH 07/14] testing fix

---
 pytorch_lightning/trainer/model_hooks.py | 2 +-
 pytorch_lightning/trainer/trainer.py | 3 +--
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/pytorch_lightning/trainer/model_hooks.py b/pytorch_lightning/trainer/model_hooks.py
index c99d3c8d325b7..2baacb897f0ac 100644
--- a/pytorch_lightning/trainer/model_hooks.py
+++ b/pytorch_lightning/trainer/model_hooks.py
@@ -24,7 +24,7 @@ def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
         super_attr = getattr(super_object, method_name)
 
         # when code pointers are different, it was overriden
-        is_overriden = hash(instance_attr.__code__) is not hash(super_attr.__code__)
+        is_overriden = instance_attr.__code__ is not super_attr.__code__
         return is_overriden
 
     def has_arg(self, f_name, arg_name):

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 790b2c178cf23..cb0d7ac08893b 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -907,6 +907,7 @@ def check_model_configuration(self, model: LightningModule):
                 'No `training_step()` method defined. Lightning `Trainer` expects as minimum a'
                 ' `training_step()`, `training_dataloader()` and `configure_optimizers()` to be defined.')
 
+        import pdb; pdb.set_trace()
         if not self.is_overriden('train_dataloader', model):
             raise MisconfigurationException(
                 'No `train_dataloader()` method defined. Lightning `Trainer` expects as minimum a'
@@ -964,7 +963,5 @@ class _PatchDataLoader(object):
     def __init__(self, dataloader: Union[List[DataLoader], DataLoader]):
         self.dataloader = dataloader
 
-        self.code = hash(self.__call__.__code__)
-
     def __call__(self) -> Union[List[DataLoader], DataLoader]:
         return self.dataloader

From 5a7347351464ed8a09093d1f105ba517486d5d8e Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Fri, 3 Apr 2020 07:22:23 -0400
Subject: [PATCH 08/14] testing fix

---
 pytorch_lightning/trainer/model_hooks.py | 8 ++++++--
 pytorch_lightning/trainer/trainer.py | 8 ++++----
 2 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/pytorch_lightning/trainer/model_hooks.py b/pytorch_lightning/trainer/model_hooks.py
index 2baacb897f0ac..3227a67cf0883 100644
--- a/pytorch_lightning/trainer/model_hooks.py
+++ b/pytorch_lightning/trainer/model_hooks.py
@@ -23,8 +23,12 @@ def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
         instance_attr = getattr(model, method_name)
         super_attr = getattr(super_object, method_name)
 
-        # when code pointers are different, it was overriden
-        is_overriden = instance_attr.__code__ is not super_attr.__code__
+        # when code pointers are different, it was implemented
+        if 'dataloader' in method_name:
+            import pdb; pdb.set_trace()
+            is_overriden = instance_attr.code != str(super_attr.__code__)
+        else:
+            is_overriden = instance_attr.__code__ is not super_attr.__code__
         return is_overriden
 
     def has_arg(self, f_name, arg_name):

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index cb0d7ac08893b..cfcf20b1a7661 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -649,12 +649,12 @@ def fit(
         model.logger = self.logger
         self.copy_trainer_model_properties(model)
 
-        # set up the passed in dataloaders (if needed)
-        self.__attach_dataloaders(model, train_dataloader, val_dataloaders, test_dataloaders)
-
         # check that model is configured correctly
         self.check_model_configuration(model)
 
+        # set up the passed in dataloaders (if needed)
+        self.__attach_dataloaders(model, train_dataloader, val_dataloaders, test_dataloaders)
+
         # download the data and do whatever transforms we need
         # do before any spawn calls so that the model can assign properties
         # only on proc 0 because no spawn has happened yet
@@ -907,7 +907,6 @@ def check_model_configuration(self, model: LightningModule):
                 'No `training_step()` method defined. Lightning `Trainer` expects as minimum a'
                 ' `training_step()`, `training_dataloader()` and `configure_optimizers()` to be defined.')
 
-        import pdb; pdb.set_trace()
         if not self.is_overriden('train_dataloader', model):
             raise MisconfigurationException(
                 'No `train_dataloader()` method defined. Lightning `Trainer` expects as minimum a'
@@ -963,6 +962,7 @@ class _PatchDataLoader(object):
 
     def __init__(self, dataloader: Union[List[DataLoader], DataLoader]):
         self.dataloader = dataloader
+        self.code = str(self.__call__.__code__)
 
     def __call__(self) -> Union[List[DataLoader], DataLoader]:
         return self.dataloader
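A note on the approach PATCH 08 sketches: when dataloaders are handed straight to `fit()`, `__attach_dataloaders` replaces the model's dataloader hooks with `_PatchDataLoader` instances, which are callables carrying no `__code__` of their own, so the plain code-object identity test cannot work for exactly those methods. The workaround stores a string rendering of the wrapper's `__call__.__code__` and compares it with the parent hook's code object; the string form of a code object embeds the function name, file and address, so the two strings differ whenever the wrapper is in place. A rough, hypothetical illustration (names invented for the example):

    class _Patch:
        def __call__(self):
            return "data"

    def train_dataloader():  # stands in for the LightningModule default hook
        return None

    patched = _Patch()
    print(str(patched.__call__.__code__))  # e.g. <code object __call__ at 0x..., file "...", line 2>
    print(str(train_dataloader.__code__))  # e.g. <code object train_dataloader at 0x..., line 5>
    # the strings differ, so a != comparison flags the hook as overridden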
From 7dbd07ba0622fe36e8b554f38ef68ddba655b601 Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Fri, 3 Apr 2020 07:24:50 -0400
Subject: [PATCH 09/14] testing fix

---
 pytorch_lightning/trainer/trainer.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index cfcf20b1a7661..d93832f15488a 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -649,12 +649,12 @@ def fit(
         model.logger = self.logger
         self.copy_trainer_model_properties(model)
 
-        # check that model is configured correctly
-        self.check_model_configuration(model)
-
         # set up the passed in dataloaders (if needed)
         self.__attach_dataloaders(model, train_dataloader, val_dataloaders, test_dataloaders)
 
+        # check that model is configured correctly
+        self.check_model_configuration(model)
+
         # download the data and do whatever transforms we need
         # do before any spawn calls so that the model can assign properties
         # only on proc 0 because no spawn has happened yet

From 8e0309d2c98fa96db007573f2033c28d790f7c6f Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Fri, 3 Apr 2020 07:28:08 -0400
Subject: [PATCH 10/14] testing fix

---
 pytorch_lightning/trainer/model_hooks.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/model_hooks.py b/pytorch_lightning/trainer/model_hooks.py
index 3227a67cf0883..e76f2fe57ea8b 100644
--- a/pytorch_lightning/trainer/model_hooks.py
+++ b/pytorch_lightning/trainer/model_hooks.py
@@ -24,7 +24,7 @@ def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
         super_attr = getattr(super_object, method_name)
 
         # when code pointers are different, it was implemented
-        if 'dataloader' in method_name:
+        if not isinstance(instance_attr, function):
             import pdb; pdb.set_trace()
             is_overriden = instance_attr.code != str(super_attr.__code__)
         else:

From 923ade3cb9f85bf5be896ce1d00bf47ae4cc9bfc Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Fri, 3 Apr 2020 07:29:05 -0400
Subject: [PATCH 11/14] testing fix

---
 pytorch_lightning/trainer/model_hooks.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pytorch_lightning/trainer/model_hooks.py b/pytorch_lightning/trainer/model_hooks.py
index e76f2fe57ea8b..082043aa2b48e 100644
--- a/pytorch_lightning/trainer/model_hooks.py
+++ b/pytorch_lightning/trainer/model_hooks.py
@@ -24,6 +24,7 @@ def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
         super_attr = getattr(super_object, method_name)
 
         # when code pointers are different, it was implemented
+        print(instance_attr)
         if not isinstance(instance_attr, function):
             import pdb; pdb.set_trace()
             is_overriden = instance_attr.code != str(super_attr.__code__)

From 427b847a5a95d262b6e7dd5364c75792562aa79a Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Fri, 3 Apr 2020 07:29:30 -0400
Subject: [PATCH 12/14] testing fix

---
 pytorch_lightning/trainer/model_hooks.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/pytorch_lightning/trainer/model_hooks.py b/pytorch_lightning/trainer/model_hooks.py
index 082043aa2b48e..add3a9ec113f2 100644
--- a/pytorch_lightning/trainer/model_hooks.py
+++ b/pytorch_lightning/trainer/model_hooks.py
@@ -24,8 +24,7 @@ def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
         super_attr = getattr(super_object, method_name)
 
         # when code pointers are different, it was implemented
-        print(instance_attr)
-        if not isinstance(instance_attr, function):
+        if hasattr(instance_attr, 'code'):
             import pdb; pdb.set_trace()
             is_overriden = instance_attr.code != str(super_attr.__code__)
         else:
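A note on PATCH 10 through 12: `isinstance(instance_attr, function)` cannot run as written, because `function` is not a built-in name and the line raises `NameError`; the type lives at `types.FunctionType`, and an attribute fetched from a model instance is a bound method (`types.MethodType`) anyway, so even the corrected spelling would misclassify ordinary hooks. PATCH 12 drops the type check and duck-types on the attribute the wrapper carries instead. A small illustration, not from the patch:

    import types

    def free_function():
        return None

    class Wrapper:
        def __call__(self):
            return None

    class Model:
        def hook(self):
            return None

    print(isinstance(free_function, types.FunctionType))  # True
    print(isinstance(Model().hook, types.FunctionType))   # False: bound methods are MethodType
    print(isinstance(Model().hook, types.MethodType))     # True
    print(hasattr(Wrapper(), '__code__'))                 # False: callable instance, no code object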
From 2abd552ec19920c2793fbe077cb861a364f5f042 Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Fri, 3 Apr 2020 07:32:33 -0400
Subject: [PATCH 13/14] testing fix

---
 pytorch_lightning/trainer/model_hooks.py | 6 ++++--
 pytorch_lightning/trainer/trainer.py | 6 +++++-
 2 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/pytorch_lightning/trainer/model_hooks.py b/pytorch_lightning/trainer/model_hooks.py
index add3a9ec113f2..3dc26963ffa3a 100644
--- a/pytorch_lightning/trainer/model_hooks.py
+++ b/pytorch_lightning/trainer/model_hooks.py
@@ -24,8 +24,10 @@ def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
         super_attr = getattr(super_object, method_name)
 
         # when code pointers are different, it was implemented
-        if hasattr(instance_attr, 'code'):
-            import pdb; pdb.set_trace()
+        if hasattr(instance_attr, 'patch_loader_code'):
+            # cannot pickle __code__ so cannot verify if PatchDataloader
+            # exists which shows dataloader methods have been overwritten.
+            # so, we hack it by using the string representation
            is_overriden = instance_attr.code != str(super_attr.__code__)
         else:
             is_overriden = instance_attr.__code__ is not super_attr.__code__

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index d93832f15488a..ae007352ca28b 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -962,7 +962,11 @@ class _PatchDataLoader(object):
 
     def __init__(self, dataloader: Union[List[DataLoader], DataLoader]):
         self.dataloader = dataloader
-        self.code = str(self.__call__.__code__)
+
+        # cannot pickle __code__ so cannot verify if PatchDataloader
+        # exists which shows dataloader methods have been overwritten.
+        # so, we hack it by using the string representation
+        self.patch_loader_code = str(self.__call__.__code__)
 
     def __call__(self) -> Union[List[DataLoader], DataLoader]:
         return self.dataloader

From abea342e857784bb7270d164ea183480f467dcba Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Fri, 3 Apr 2020 07:33:50 -0400
Subject: [PATCH 14/14] testing fix

---
 pytorch_lightning/trainer/model_hooks.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/model_hooks.py b/pytorch_lightning/trainer/model_hooks.py
index 3dc26963ffa3a..d4871ff2158b6 100644
--- a/pytorch_lightning/trainer/model_hooks.py
+++ b/pytorch_lightning/trainer/model_hooks.py
@@ -28,7 +28,7 @@ def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
             # cannot pickle __code__ so cannot verify if PatchDataloader
             # exists which shows dataloader methods have been overwritten.
             # so, we hack it by using the string representation
-            is_overriden = instance_attr.code != str(super_attr.__code__)
+            is_overriden = instance_attr.patch_loader_code != str(super_attr.__code__)
         else:
             is_overriden = instance_attr.__code__ is not super_attr.__code__
         return is_overriden
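After PATCH 13 and 14 the mechanism is consistent end to end: `_PatchDataLoader.__init__` stores `patch_loader_code = str(self.__call__.__code__)` (a plain, picklable string), and `is_overriden()` treats any attribute carrying `patch_loader_code` as a dataloader supplied to `fit()`, comparing that string against the parent hook's code object; everything else falls back to the usual `__code__ is not __code__` identity test. A condensed, standalone sketch of the final logic under simplified names (not the Lightning source):

    class BaseModule:
        """Stands in for LightningModule's default hooks."""

        def train_dataloader(self):
            return None

        def training_step(self, batch):
            return None


    class _PatchDataLoader:
        """Wraps a dataloader passed directly to fit(); picklable because it
        stores only a string, never the code object itself."""

        def __init__(self, dataloader):
            self.dataloader = dataloader
            self.patch_loader_code = str(self.__call__.__code__)

        def __call__(self):
            return self.dataloader


    def is_overriden(method_name, model, base_cls=BaseModule):
        instance_attr = getattr(model, method_name)
        super_attr = getattr(base_cls, method_name)

        if hasattr(instance_attr, 'patch_loader_code'):
            # a _PatchDataLoader replaced the hook: compare string representations
            return instance_attr.patch_loader_code != str(super_attr.__code__)
        # normal case: an overriding method brings its own code object
        return instance_attr.__code__ is not super_attr.__code__


    class UserModule(BaseModule):
        def training_step(self, batch):
            return batch


    model = UserModule()
    model.train_dataloader = _PatchDataLoader(dataloader=[1, 2, 3])

    print(is_overriden('training_step', model))     # True: user override
    print(is_overriden('train_dataloader', model))  # True: patched via fit()
    print(is_overriden('train_dataloader', BaseModule()))  # False: untouched default hook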