Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Switch from tensorboard to tensorboardx in logger #15728

Merged
merged 37 commits into from Nov 21, 2022
Merged
Show file tree
Hide file tree
Changes from 15 commits
Commits
Show all changes
37 commits
Select commit Hold shift + click to select a range
7b85d77
Switch from tensorboard to tensorboardx in logger
lantiga Nov 18, 2022
cedf06e
Warn if log_graph is set to True but tensorboard is not installed
lantiga Nov 18, 2022
9b67345
Update tensorboard.py
williamFalcon Nov 18, 2022
57a5c3c
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Nov 18, 2022
99d3f1a
Update src/pytorch_lightning/loggers/tensorboard.py
lantiga Nov 18, 2022
1cfde33
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Nov 18, 2022
df72585
Update src/pytorch_lightning/loggers/tensorboard.py
lantiga Nov 18, 2022
df0982a
Fix warning message formatting
lantiga Nov 18, 2022
5052464
Import tensorboard if available
lantiga Nov 18, 2022
f123b68
Ignore mypy error
lantiga Nov 18, 2022
f68f752
Apply suggestions from code review
Borda Nov 18, 2022
31f79bf
Update src/pytorch_lightning/loggers/tensorboard.py
lantiga Nov 18, 2022
ff5aeea
Fix type ignore comments
lantiga Nov 20, 2022
0a66901
Implement suggestions
carmocca Nov 18, 2022
37f9970
Fix tests when only tensorboardX is available
carmocca Nov 21, 2022
fcbaea3
Fix CLI test
carmocca Nov 21, 2022
0923919
Merge branch 'master' into tensorboardx
carmocca Nov 21, 2022
c02d14e
Fix merge
carmocca Nov 21, 2022
0e4552f
Apply suggestions from code review
Borda Nov 21, 2022
d9db264
Apply suggestions from code review
Borda Nov 21, 2022
4576ef3
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Nov 21, 2022
dea1c24
Merge branch 'master' into tensorboardx
lantiga Nov 21, 2022
105da79
simplify for TBX as required pkg
Borda Nov 21, 2022
14f709d
simplify for TBX as required pkg
Borda Nov 21, 2022
b1f04d7
docs example
Borda Nov 21, 2022
65c4798
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Nov 21, 2022
d84184d
...
Borda Nov 21, 2022
b1d9909
Merge branch 'tensorboardx' of https://github.com/PyTorchLightning/py…
Borda Nov 21, 2022
0683ca3
tests
Borda Nov 21, 2022
dd38bee
Merge branch 'master' into tensorboardx
Borda Nov 21, 2022
0488a6c
chlog
Borda Nov 21, 2022
ceb561f
Merge branch 'tensorboardx' of https://github.com/PyTorchLightning/py…
Borda Nov 21, 2022
61bdeaf
reqs
Borda Nov 21, 2022
d3dc7ee
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Nov 21, 2022
317db17
tbx 2.4
Borda Nov 21, 2022
968487b
tbx 2.2
Borda Nov 21, 2022
3a4ff4e
Merge branch 'master' into tensorboardx
Borda Nov 21, 2022
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 1 addition & 1 deletion requirements/pytorch/base.txt
Expand Up @@ -6,7 +6,7 @@ torch>=1.10.*, <=1.13.0
tqdm>=4.57.0, <4.65.0
PyYAML>=5.4, <=6.0
fsspec[http]>2021.06.0, <2022.8.0
tensorboard>=2.9.1, <2.11.0
tensorboardX>=2.0, <=2.5.1
torchmetrics>=0.7.0, <0.10.1 # needed for using fixed compare_version
packaging>=17.0, <=21.3
typing-extensions>=4.0.0, <=4.4.0
Expand Down
1 change: 1 addition & 0 deletions requirements/pytorch/test.txt
Expand Up @@ -14,3 +14,4 @@ psutil<5.9.4 # for `DeviceStatsMonitor`
pandas>1.0, <1.5.2 # needed in benchmarks
fastapi<0.87.0
uvicorn<0.19.1
tensorboard>=2.9.1, <2.11.0
carmocca marked this conversation as resolved.
Show resolved Hide resolved
Borda marked this conversation as resolved.
Show resolved Hide resolved
55 changes: 46 additions & 9 deletions src/pytorch_lightning/loggers/tensorboard.py
Expand Up @@ -19,12 +19,11 @@
import logging
import os
from argparse import Namespace
from typing import Any, Dict, Mapping, Optional, Union
from typing import Any, Dict, Mapping, Optional, TYPE_CHECKING, Union

import numpy as np
from lightning_utilities.core.imports import RequirementCache
from torch import Tensor
from torch.utils.tensorboard import SummaryWriter
from torch.utils.tensorboard.summary import hparams

import pytorch_lightning as pl
from lightning_lite.utilities.cloud_io import get_filesystem
Expand All @@ -36,6 +35,17 @@
from pytorch_lightning.utilities.logger import _sanitize_params as _utils_sanitize_params
from pytorch_lightning.utilities.rank_zero import rank_zero_only, rank_zero_warn

_TENSORBOARD_AVAILABLE = RequirementCache("tensorboard>=2.9.1")
_TENSORBOARDX_AVAILABLE = RequirementCache("tensorboardX>=2.0")
if TYPE_CHECKING:
# assumes at least one will be installed when type checking
if _TENSORBOARD_AVAILABLE:
from torch.utils.tensorboard import SummaryWriter
else:
from tensorboardX import SummaryWriter # type: ignore[no-redef]
else:
SummaryWriter = Any
Borda marked this conversation as resolved.
Show resolved Hide resolved

log = logging.getLogger(__name__)

if _OMEGACONF_AVAILABLE:
Expand All @@ -46,9 +56,9 @@ class TensorBoardLogger(Logger):
r"""
Log to local file system in `TensorBoard <https://www.tensorflow.org/tensorboard>`_ format.

Implemented using :class:`~torch.utils.tensorboard.SummaryWriter`. Logs are saved to
``os.path.join(save_dir, name, version)``. This is the default logger in Lightning, it comes
preinstalled.
Implemented using :class:`torch.utils.tensorboard.SummaryWriter` if ``tensorboard`` is available. Otherwise, it uses
the ``tensorboardX`` implementation. Logs are saved to ``os.path.join(save_dir, name, version)``. This is the
default logger in Lightning, it comes preinstalled.

Example:

Expand Down Expand Up @@ -77,7 +87,7 @@ class TensorBoardLogger(Logger):
sub_dir: Sub-directory to group TensorBoard logs. If a sub_dir argument is passed
then logs are saved in ``/save_dir/name/version/sub_dir/``. Defaults to ``None`` in which
logs are saved in ``/save_dir/name/version/``.
\**kwargs: Additional arguments used by :class:`SummaryWriter` can be passed as keyword
\**kwargs: Additional arguments used by :class:`torch.utils.tensorboard.SummaryWriter` can be passed as keyword
arguments in this logger. To automatically flush to disk, `max_queue` sets the size
of the queue for pending logs before flushing. `flush_secs` determines how many seconds
elapses before flushing.
Expand All @@ -97,13 +107,28 @@ def __init__(
sub_dir: Optional[_PATH] = None,
**kwargs: Any,
):
if not _TENSORBOARD_AVAILABLE and not _TENSORBOARDX_AVAILABLE:
Borda marked this conversation as resolved.
Show resolved Hide resolved
raise ModuleNotFoundError(f"{_TENSORBOARD_AVAILABLE!s}. You can also install `tensorboardX` if you prefer.")

super().__init__()
save_dir = os.fspath(save_dir)
self._save_dir = save_dir
self._name = name or ""
self._version = version
self._sub_dir = None if sub_dir is None else os.fspath(sub_dir)
self._log_graph = log_graph
self._log_graph = log_graph and _TENSORBOARD_AVAILABLE
if log_graph and not _TENSORBOARD_AVAILABLE:
if _TENSORBOARDX_AVAILABLE:
rank_zero_warn(
"You set `TensorBoardLogger(log_graph=True)` but `tensorboard` is not available. `tensorboardX` is"
" installed but it does not support this feature."
)
else:
rank_zero_warn(
"You set `TensorBoardLogger(log_graph=True)` but `tensorboard` is not available. "
+ str(_TENSORBOARDX_AVAILABLE)
)
Borda marked this conversation as resolved.
Show resolved Hide resolved

self._default_hp_metric = default_hp_metric
self._prefix = prefix
self._fs = get_filesystem(save_dir)
Expand Down Expand Up @@ -157,7 +182,7 @@ def sub_dir(self) -> Optional[str]:

@property
@rank_zero_experiment
def experiment(self) -> SummaryWriter:
def experiment(self) -> "SummaryWriter":
r"""
Actual tensorboard object. To use TensorBoard features in your
:class:`~pytorch_lightning.core.module.LightningModule` do the following.
Expand All @@ -173,6 +198,12 @@ def experiment(self) -> SummaryWriter:
assert rank_zero_only.rank == 0, "tried to init log dirs in non global_rank=0"
if self.root_dir:
self._fs.makedirs(self.root_dir, exist_ok=True)

if _TENSORBOARD_AVAILABLE:
from torch.utils.tensorboard import SummaryWriter
else:
from tensorboardX import SummaryWriter # type: ignore[no-redef]

self._experiment = SummaryWriter(log_dir=self.log_dir, **self._kwargs)
return self._experiment

Expand Down Expand Up @@ -209,6 +240,12 @@ def log_hyperparams(

if metrics:
self.log_metrics(metrics, 0)

if _TENSORBOARD_AVAILABLE:
from torch.utils.tensorboard.summary import hparams
else:
from tensorboardX.summary import hparams # type: ignore[no-redef]

exp, ssi, sei = hparams(params, metrics)
writer = self.experiment._get_file_writer()
writer.add_summary(exp)
Expand Down
1 change: 1 addition & 0 deletions tests/tests_pytorch/conftest.py
Expand Up @@ -75,6 +75,7 @@ def restore_env_variables():
"CUDA_MODULE_LOADING", # leaked since PyTorch 1.13
"KMP_INIT_AT_FORK", # leaked since PyTorch 1.13
"KMP_DUPLICATE_LIB_OK", # leaked since PyTorch 1.13
"CRC32C_SW_MODE", # leaked by tensorboardX
}
leaked_vars.difference_update(allowlist)
assert not leaked_vars, f"test is leaking environment variable(s): {set(leaked_vars)}"
Expand Down
42 changes: 25 additions & 17 deletions tests/tests_pytorch/loggers/test_all.py
Expand Up @@ -15,22 +15,16 @@
import inspect
import pickle
from unittest import mock
from unittest.mock import ANY
from unittest.mock import ANY, Mock

import pytest
import torch

from pytorch_lightning import Callback, Trainer
from pytorch_lightning.demos.boring_classes import BoringModel
from pytorch_lightning.loggers import (
CometLogger,
CSVLogger,
MLFlowLogger,
NeptuneLogger,
TensorBoardLogger,
WandbLogger,
)
from pytorch_lightning.loggers import CometLogger, CSVLogger, MLFlowLogger, NeptuneLogger, WandbLogger
from pytorch_lightning.loggers.logger import DummyExperiment
from pytorch_lightning.loggers.tensorboard import _TENSORBOARD_AVAILABLE, _TENSORBOARDX_AVAILABLE, TensorBoardLogger
from tests_pytorch.helpers.runif import RunIf
from tests_pytorch.loggers.test_comet import _patch_comet_atexit
from tests_pytorch.loggers.test_mlflow import mock_mlflow_run_creation
Expand Down Expand Up @@ -300,10 +294,17 @@ def test_logger_with_prefix_all(tmpdir, monkeypatch):
logger.experiment.__getitem__().log.assert_called_once_with(1.0)

# TensorBoard
with mock.patch("pytorch_lightning.loggers.tensorboard.SummaryWriter"):
logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir, prefix=prefix)
logger.log_metrics({"test": 1.0}, step=0)
logger.experiment.add_scalar.assert_called_once_with("tmp-test", 1.0, 0)
if _TENSORBOARD_AVAILABLE:
import torch.utils.tensorboard as tb
elif not _TENSORBOARDX_AVAILABLE:
import tensorboardX as tb
else:
pytest.skip("`tensorboard` not installed.")

monkeypatch.setattr(tb, "SummaryWriter", Mock())
logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir, prefix=prefix)
logger.log_metrics({"test": 1.0}, step=0)
logger.experiment.add_scalar.assert_called_once_with("tmp-test", 1.0, 0)

# WandB
with mock.patch("pytorch_lightning.loggers.wandb.wandb") as wandb, mock.patch(
Expand All @@ -316,17 +317,24 @@ def test_logger_with_prefix_all(tmpdir, monkeypatch):
logger.experiment.log.assert_called_once_with({"tmp-test": 1.0, "trainer/global_step": 0})


def test_logger_default_name(tmpdir):
def test_logger_default_name(tmpdir, monkeypatch):
"""Test that the default logger name is lightning_logs."""

# CSV
logger = CSVLogger(save_dir=tmpdir)
assert logger.name == "lightning_logs"

# TensorBoard
with mock.patch("pytorch_lightning.loggers.tensorboard.SummaryWriter"):
logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir)
assert logger.name == "lightning_logs"
if _TENSORBOARD_AVAILABLE:
import torch.utils.tensorboard as tb
elif not _TENSORBOARDX_AVAILABLE:
import tensorboardX as tb
else:
pytest.skip("`tensorboard` not installed.")

monkeypatch.setattr(tb, "SummaryWriter", Mock())
logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir)
assert logger.name == "lightning_logs"

# MLflow
with mock.patch("pytorch_lightning.loggers.mlflow.mlflow"), mock.patch(
Expand Down
21 changes: 15 additions & 6 deletions tests/tests_pytorch/loggers/test_tensorboard.py
Expand Up @@ -15,6 +15,7 @@
import os
from argparse import Namespace
from unittest import mock
from unittest.mock import Mock

import numpy as np
import pytest
Expand All @@ -23,7 +24,7 @@

from pytorch_lightning import Trainer
from pytorch_lightning.demos.boring_classes import BoringModel
from pytorch_lightning.loggers import TensorBoardLogger
from pytorch_lightning.loggers.tensorboard import _TENSORBOARD_AVAILABLE, _TENSORBOARDX_AVAILABLE, TensorBoardLogger
from pytorch_lightning.utilities.imports import _OMEGACONF_AVAILABLE
from tests_pytorch.helpers.runif import RunIf

Expand Down Expand Up @@ -220,6 +221,7 @@ def test_tensorboard_log_graph(tmpdir, example_input_array):
logger.log_graph(model, example_input_array)


@pytest.mark.skipif(not _TENSORBOARD_AVAILABLE, reason=str(_TENSORBOARD_AVAILABLE))
def test_tensorboard_log_graph_warning_no_example_input_array(tmpdir):
"""test that log graph throws warning if model.example_input_array is None."""
model = BoringModel()
Expand Down Expand Up @@ -276,23 +278,30 @@ def training_step(self, *args):
assert count_steps == model.indexes


@mock.patch("pytorch_lightning.loggers.tensorboard.SummaryWriter")
def test_tensorboard_finalize(summary_writer, tmpdir):
def test_tensorboard_finalize(monkeypatch, tmpdir):
    """Test that the SummaryWriter closes in finalize.

    Patches ``SummaryWriter`` in whichever backing package is installed
    (``torch.utils.tensorboard`` preferred, ``tensorboardX`` as fallback) so no
    real event files are written, then checks that ``finalize`` flushes and
    closes the writer only when an experiment was actually created.
    """
    if _TENSORBOARD_AVAILABLE:
        import torch.utils.tensorboard as tb
    elif _TENSORBOARDX_AVAILABLE:
        # BUGFIX: condition was inverted (`elif not _TENSORBOARDX_AVAILABLE`),
        # which tried to import tensorboardX precisely when it was missing and
        # skipped the test when it was installed.
        import tensorboardX as tb
    else:
        pytest.skip("neither `tensorboard` nor `tensorboardX` is installed.")

    monkeypatch.setattr(tb, "SummaryWriter", Mock())
    logger = TensorBoardLogger(save_dir=tmpdir)
    assert logger._experiment is None
    logger.finalize("any")

    # no log calls, no experiment created -> nothing to flush
    logger.experiment.assert_not_called()

    logger = TensorBoardLogger(save_dir=tmpdir)
    logger.log_metrics({"flush_me": 11.1})  # trigger creation of an experiment
    logger.finalize("any")

    # finalize flushes to experiment directory
    logger.experiment.flush.assert_called()
    logger.experiment.close.assert_called()


def test_tensorboard_save_hparams_to_yaml_once(tmpdir):
Expand Down