Skip to content

Commit

Permalink
Added the ability to specify a random seed in randomisation tests
Browse files Browse the repository at this point in the history
  • Loading branch information
annahedstroem committed Mar 7, 2022
1 parent 0826b91 commit 128ac42
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 3 deletions.
3 changes: 2 additions & 1 deletion quantus/helpers/pytorch_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def state_dict(self):
"""Get a dictionary of the model's learnable parameters."""
return self.model.state_dict()

def get_random_layer_generator(self, order: str = "top_down"):
def get_random_layer_generator(self, order: str = "top_down", seed: int = 42):
"""
In every iteration yields a copy of the model with one additional layer's parameters randomized.
Set order to top_down for cascading randomization.
Expand All @@ -65,5 +65,6 @@ def get_random_layer_generator(self, order: str = "top_down"):
for module in modules:
if order == "independent":
random_layer_model.load_state_dict(original_parameters)
torch.manual_seed(seed=seed+1)
module[1].reset_parameters()
yield module[0], random_layer_model
3 changes: 2 additions & 1 deletion quantus/helpers/tf_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def load_state_dict(self, original_parameters):
"""Set model's learnable parameters."""
self.model.set_weights(original_parameters)

def get_random_layer_generator(self, order: str = "top_down"):
def get_random_layer_generator(self, order: str = "top_down", seed: int = 42):
"""
In every iteration yields a copy of the model with one additional layer's parameters randomized.
Set order to top_down for cascading randomization.
Expand All @@ -70,5 +70,6 @@ def get_random_layer_generator(self, order: str = "top_down"):
if order == "independent":
random_layer_model.set_weights(original_parameters)
weights = layer.get_weights()
np.random.seed(seed=seed+1)
layer.set_weights([np.random.permutation(w) for w in weights])
yield layer.name, random_layer_model
5 changes: 4 additions & 1 deletion quantus/metrics/randomisation_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ def __init__(self, *args, **kwargs):
self.display_progressbar = self.kwargs.get("display_progressbar", False)
self.similarity_func = self.kwargs.get("similarity_func", correlation_spearman)
self.layer_order = kwargs.get("layer_order", "independent")
self.seed = self.kwargs.get("seed", 42)
self.last_results = {}
self.all_results = []

Expand Down Expand Up @@ -183,7 +184,7 @@ def __call__(
pbar = tqdm(total=n_iterations)

for layer_name, random_layer_model in model.get_random_layer_generator(
order=self.layer_order
order=self.layer_order, seed=self.seed
):

similarity_scores = []
Expand Down Expand Up @@ -266,6 +267,7 @@ def __init__(self, *args, **kwargs):
self.normalise_func = self.kwargs.get("normalise_func", normalise_by_negative)
self.similarity_func = self.kwargs.get("similarity_func", ssim)
self.num_classes = self.kwargs.get("num_classes", 1000)
self.seed = self.kwargs.get("seed", 42)
self.last_results = []
self.all_results = []

Expand Down Expand Up @@ -384,6 +386,7 @@ def __call__(
a = self.normalise_func(a)

# Randomly select off-class labels.
random.seed(a=self.seed)
y_off = np.array(
[
random.choice(
Expand Down

0 comments on commit 128ac42

Please sign in to comment.