🔨 Pass pre-trained from config to ModelLightning (#529)
Pass pre-trained from config to LightningModule
samet-akcay committed Sep 9, 2022
1 parent dff86b1 · commit baca449
Showing 6 changed files with 6 additions and 0 deletions.
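For context: `pre_trained` was already a key under `model` in each config, but the LightningModules were not forwarding it, so the underlying torch models silently used their constructor default. Below is a minimal sketch of the wiring this commit establishes; ExampleTorchModel and ExampleLightning are hypothetical stand-ins (only `pre_trained` and the `hparams.model.*` access pattern come from the diff):

from typing import List, Union

from omegaconf import DictConfig, ListConfig, OmegaConf

# Abbreviated, hypothetical config -- real anomalib configs carry many more keys.
hparams = OmegaConf.create(
    {
        "model": {
            "backbone": "resnet18",
            "layers": ["layer1", "layer2", "layer3"],
            "pre_trained": True,  # the key this commit starts forwarding
        }
    }
)


class ExampleTorchModel:
    """Stand-in for the torch model behind each LightningModule."""

    def __init__(self, backbone: str, layers: List[str], pre_trained: bool = True) -> None:
        self.backbone = backbone
        self.layers = list(layers)
        self.pre_trained = pre_trained  # before this commit, always the default


class ExampleLightning:
    """Sketch of the pattern applied in each changed lightning_model.py."""

    def __init__(self, hparams: Union[DictConfig, ListConfig]) -> None:
        self.model = ExampleTorchModel(
            backbone=hparams.model.backbone,
            layers=hparams.model.layers,
            pre_trained=hparams.model.pre_trained,  # the one-line change, per model
        )


module = ExampleLightning(hparams)
assert module.model.pre_trained is True

Each diff below is this same one-line change applied to a different model.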
1 change: 1 addition & 0 deletions anomalib/models/cflow/lightning_model.py
@@ -185,6 +185,7 @@ def __init__(self, hparams: Union[DictConfig, ListConfig]) -> None:
     input_size=hparams.model.input_size,
     backbone=hparams.model.backbone,
     layers=hparams.model.layers,
+    pre_trained=hparams.model.pre_trained,
     fiber_batch_size=hparams.dataset.fiber_batch_size,
     decoder=hparams.model.decoder,
     condition_vector=hparams.model.condition_vector,
1 change: 1 addition & 0 deletions anomalib/models/dfkde/lightning_model.py
@@ -119,6 +119,7 @@ def __init__(self, hparams: Union[DictConfig, ListConfig]) -> None:
     super().__init__(
         layers=hparams.model.layers,
         backbone=hparams.model.backbone,
+        pre_trained=hparams.model.pre_trained,
         max_training_points=hparams.model.max_training_points,
         pre_processing=hparams.model.pre_processing,
         n_components=hparams.model.n_components,
1 change: 1 addition & 0 deletions anomalib/models/dfm/lightning_model.py
@@ -118,6 +118,7 @@ def __init__(self, hparams: Union[DictConfig, ListConfig]) -> None:
     super().__init__(
         backbone=hparams.model.backbone,
         layer=hparams.model.layer,
+        pre_trained=hparams.model.pre_trained,
         pooling_kernel_size=hparams.model.pooling_kernel_size,
         pca_level=hparams.model.pca_level,
         score_type=hparams.model.score_type,
1 change: 1 addition & 0 deletions anomalib/models/fastflow/lightning_model.py
@@ -90,6 +90,7 @@ def __init__(self, hparams: Union[DictConfig, ListConfig]) -> None:
     super().__init__(
         input_size=hparams.model.input_size,
         backbone=hparams.model.backbone,
+        pre_trained=hparams.model.pre_trained,
         flow_steps=hparams.model.flow_steps,
         conv3x3_only=hparams.model.conv3x3_only,
         hidden_ratio=hparams.model.hidden_ratio,
1 change: 1 addition & 0 deletions anomalib/models/patchcore/lightning_model.py
@@ -124,6 +124,7 @@ def __init__(self, hparams) -> None:
     input_size=hparams.model.input_size,
     backbone=hparams.model.backbone,
     layers=hparams.model.layers,
+    pre_trained=hparams.model.pre_trained,
     coreset_sampling_ratio=hparams.model.coreset_sampling_ratio,
     num_neighbors=hparams.model.num_neighbors,
 )
1 change: 1 addition & 0 deletions anomalib/models/reverse_distillation/lightning_model.py
@@ -121,6 +121,7 @@ def __init__(self, hparams: Union[DictConfig, ListConfig]):
     input_size=hparams.model.input_size,
     backbone=hparams.model.backbone,
     layers=hparams.model.layers,
+    pre_trained=hparams.model.pre_trained,
     anomaly_map_mode=hparams.model.anomaly_map_mode,
     lr=hparams.model.lr,
     beta1=hparams.model.beta1,

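Downstream, the flag typically decides whether the backbone loads ImageNet weights: anomalib's feature extractors are generally timm-based, though the exact call site differs per model. A hedged sketch of that consumption (the helper name is illustrative, not anomalib's API):

import timm


def build_feature_extractor(backbone: str, pre_trained: bool):
    # pretrained=True downloads ImageNet weights; False keeps random init.
    # This is the decision the forwarded config key now actually controls.
    return timm.create_model(
        backbone,
        pretrained=pre_trained,
        features_only=True,  # return intermediate feature maps, not logits
    )


# With the commit applied, setting pre_trained: false under model in the
# config reaches this point instead of being ignored.
extractor = build_feature_extractor("resnet18", pre_trained=False)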