Commit

append tensorboard writer to writer
grace-omotoso authored and akainth015 committed Apr 8, 2021
1 parent bb1cdb4 commit 8ff7cae
Showing 4 changed files with 27 additions and 37 deletions.
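In short, the commit registers a TensorboardWriter next to the existing JsonWriter in the train task's event storage writers, so data pushed into the VISSL event storage can also be flushed to TensorBoard. A minimal sketch of that flow follows; the paths, the scalar value, the loop, and the assumption that create_event_storage/get_event_storage are importable from vissl.utils.events are illustrative, not taken from this commit.

from vissl.utils.events import JsonWriter, TensorboardWriter, create_event_storage, get_event_storage

# Set up an event storage and the two writers this commit wires together
# (create_event_storage is assumed to live in vissl.utils.events).
create_event_storage()
storage = get_event_storage()
writers = [
    JsonWriter("/tmp/stdout.json"),
    TensorboardWriter(log_dir="/tmp/tb_logs", flush_secs=30),
]

# Buffer a metric in the storage, then let every registered writer flush it.
storage.put_scalar("train_loss", 0.42)
for writer in writers:
    writer.write()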
12 changes: 3 additions & 9 deletions tests/test_configs.py
@@ -47,9 +47,7 @@ def test_cfg_composition(self):
         )
         _, config = convert_to_attrdict(cfg.default_cfg)
         self.assertEqual(
-            config.MODEL.TRUNK.TRUNK_PARAMS.RESNETS.DEPTH,
-            101,
-            "config composition failed",
+            config.MODEL.TRUNK.RESNETS.DEPTH, 101, "config composition failed"
         )


@@ -84,14 +82,10 @@ def test_cfg_cli_composition(self):
         )
         _, config = convert_to_attrdict(cfg.default_cfg)
         self.assertEqual(
-            config.MODEL.TRUNK.TRUNK_PARAMS.RESNETS.GROUPS,
-            32,
-            "config composition failed",
+            config.MODEL.TRUNK.RESNETS.GROUPS, 32, "config composition failed"
         )
         self.assertEqual(
-            config.MODEL.TRUNK.TRUNK_PARAMS.RESNETS.WIDTH_PER_GROUP,
-            16,
-            "config composition failed",
+            config.MODEL.TRUNK.RESNETS.WIDTH_PER_GROUP, 16, "config composition failed"
         )


10 changes: 2 additions & 8 deletions vissl/models/trunks/regnet_fsdp.py
@@ -91,9 +91,7 @@ def __init__(
         super().__init__()
         self.stage_depth = 0
 
-        fsdp_config = {
-            "wrapper_cls": fsdp_wrapper,
-        }
+        fsdp_config = {"wrapper_cls": fsdp_wrapper}
         fsdp_config.update(model_config.FSDP_CONFIG)
         for i in range(depth):
             # Make a block and move it to cuda since shard-as-we-build of FSDP needs
@@ -193,11 +191,7 @@ def __init__(self, model_config: AttrDict, model_name: str):
             "RES_STEM_IN": ResStemIN,
             "SIMPLE_STEM_IN": SimpleStemIN,
         }[params.stem_type](
-            3,
-            params.stem_width,
-            params.bn_epsilon,
-            params.bn_momentum,
-            activation,
+            3, params.stem_width, params.bn_epsilon, params.bn_momentum, activation
         )
         init_weights(stem)
         stem = auto_wrap_bn(stem, single_rank_pg=False)
7 changes: 4 additions & 3 deletions vissl/trainer/train_task.py
@@ -119,11 +119,12 @@ def initiate_vissl_event_storage(self):
         create_event_storage()
         self._event_storage = get_event_storage()
 
-    def build_event_storage_writers(self):
-        from vissl.utils.events import JsonWriter
+    def build_event_storage_writers(self, config: AttrDict):
+        from vissl.utils.events import JsonWriter, TensorboardWriter
 
         self.event_storage_writers = [
-            JsonWriter(f"{self.checkpoint_folder}/stdout.json")
+            JsonWriter(f"{self.checkpoint_folder}/stdout.json"),
+            TensorboardWriter(config.log_dir, config.flush_secs),
         ]
 
     @property
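The hunk above changes the call contract: build_event_storage_writers now needs a config that exposes log_dir and flush_secs. A hedged sketch of the new call site follows; the SimpleNamespace stands in for the AttrDict config, the values are made up, and `task` is assumed to be an already-constructed train task.

from types import SimpleNamespace

# Illustrative stand-in for the AttrDict the trainer would pass (hypothetical values).
config = SimpleNamespace(log_dir="/tmp/tb_logs", flush_secs=30)

task.build_event_storage_writers(config)
# task.event_storage_writers now holds a JsonWriter and a TensorboardWriter.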
35 changes: 18 additions & 17 deletions vissl/utils/events.py
@@ -1,9 +1,9 @@
 import json
 from collections import defaultdict
 
+import torch
 from fvcore.common.file_io import PathManager
 from fvcore.common.history_buffer import HistoryBuffer
-import torch
 
 
 _VISSL_EVENT_STORAGE_STACK = []
@@ -47,8 +47,8 @@ def __init__(self, start_iter=0):
         self._history = defaultdict(HistoryBuffer)
         self._latest_scalars = {}
         self._iter = start_iter
-        self._vis_data = [] # later for tensorboard
-        self._histograms = [] # later for tensorboard
+        self._vis_data = []  # later for tensorboard
+        self._histograms = []  # later for tensorboard
 
     def put_scalar(self, name, value):
         """
@@ -124,7 +124,9 @@ def put_histogram(self, hist_name, hist_tensor, bins=1000):
 
         # Create a histogram with PyTorch
         hist_counts = torch.histc(hist_tensor, bins=bins)
-        hist_edges = torch.linspace(start=ht_min, end=ht_max, steps=bins + 1, dtype=torch.float32)
+        hist_edges = torch.linspace(
+            start=ht_min, end=ht_max, steps=bins + 1, dtype=torch.float32
+        )
 
         # Parameter for the add_histogram_raw function of SummaryWriter
         hist_params = dict(
@@ -180,9 +182,9 @@ def write(self):
     def close(self):
         self._file_handle.close()
 
+
 class TensorboardWriter(VisslEventWriter):
-
-    def __init__(self, log_dir: str, flush_secs: int, **kwargs):
+    def __init__(self, log_dir: str, flush_secs: int, **kwargs):
         """
         Args:
             log_dir (str): the directory to save the output events
@@ -193,17 +195,16 @@ def __init__(self, log_dir: str, flush_secs: int, **kwargs):
         from torch.utils.tensorboard import SummaryWriter
 
         self._tb_writer = SummaryWriter(log_dir, **kwargs)
-
-    def write(self):
-        storage = get_event_storage()
-        to_save = defaultdict(dict)
-
-        # storage.put_{image,histogram} is only meant to be used by
-        # tensorboard writer. So we access its internal fields directly from here.
+
+    def write(self):
+        storage = get_event_storage()
+
+        # storage.put_{image,histogram} is only meant to be used by
+        # tensorboard writer. So we access its internal fields directly from here.
         if len(storage._vis_data) >= 1:
             for img_name, img, step_num in storage._vis_data:
                 self._tb_writer.add_image(img_name, img, step_num)
 
         # Storage stores all image data and rely on this writer to clear them.
         # As a result it assumes only one writer will use its image data.
         # An alternative design is to let storage store limited recent
@@ -215,7 +216,7 @@ def write(self):
             for params in storage._histograms:
                 self._tb_writer.add_histogram_raw(**params)
             storage.clear_histograms()
-    def close(self):
-        if hasattr(self, "_tb_writer"): # doesn't exist when the code fails at import
+
+    def close(self):
+        if hasattr(self, "_tb_writer"):  # doesn't exist when the code fails at import
             self._writer.close()

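For the histogram path touched above, put_histogram buffers the arguments for SummaryWriter.add_histogram_raw inside the storage, and TensorboardWriter.write drains and clears that buffer. A small sketch, assuming an event storage already exists and that the tensor and directory names are placeholders:

import torch
from vissl.utils.events import TensorboardWriter, get_event_storage

storage = get_event_storage()  # assumes create_event_storage() ran earlier
storage.put_histogram("fc7_weights", torch.randn(10000), bins=1000)

tb_writer = TensorboardWriter(log_dir="/tmp/tb_logs", flush_secs=30)
tb_writer.write()  # calls add_histogram_raw(**params), then storage.clear_histograms()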