Skip to content

Commit

Permalink
Merge branch 'master' into model_size_bytes
Browse files Browse the repository at this point in the history
  • Loading branch information
kartik4949 committed Dec 7, 2020
2 parents 199bafe + b00991e commit 14480c7
Show file tree
Hide file tree
Showing 74 changed files with 2,363 additions and 1,104 deletions.
63 changes: 63 additions & 0 deletions .drone.jsonnet
@@ -0,0 +1,63 @@
/*
Copyright The PyTorch Lightning team.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// https://github.com/drone/drone-jsonnet-config/blob/master/.drone.jsonnet

// Drone CI pipeline template (see https://docs.drone.io/pipeline/jsonnet/).
// Produces one docker-type pipeline that runs the GPU test suite inside the
// given image; the pipeline name doubles as the dependency key so the nightly
// pipeline can be made to wait for the stable one.
local pipeline(pipeline_name, docker_image) = {
  kind: "pipeline",
  type: "docker",
  name: pipeline_name,
  steps: [
    {
      name: "testing",
      image: docker_image,
      environment: {
        // Injected from a Drone secret; consumed by the codecov upload below.
        CODECOV_TOKEN: { from_secret: "codecov_token" },
        MKL_THREADING_LAYER: "GNU",
      },
      commands: [
        "python --version",
        "pip --version",
        "nvidia-smi",
        "pip install -r ./requirements/devel.txt --upgrade-strategy only-if-needed -v --no-cache-dir",
        "pip list",
        "coverage run --source pytorch_lightning -m pytest pytorch_lightning tests -v -ra --color=yes --durations=25",
        "python -m pytest benchmarks pl_examples -v -ra --color=yes --maxfail=2 --durations=0",
        "coverage report",
        "codecov --token $CODECOV_TOKEN --flags=gpu,pytest --name='GPU-coverage' --env=linux --build $DRONE_BUILD_NUMBER --commit $DRONE_COMMIT",
        "python tests/collect_env_details.py",
      ],
    },
  ],
  // Run on pushes and pull requests targeting master or a release branch.
  trigger: {
    branch: ["master", "release/*"],
    event: ["push", "pull_request"],
  },
  // Only the nightly pipeline waits on the stable one. For any other name
  // this `if` has no `else` branch, so the field manifests as null — which
  // Drone treats as "no dependencies".
  depends_on: if pipeline_name == "torch-GPU-nightly" then ["torch-GPU"],
};

// Concrete pipelines: stable (torch 1.6) and nightly (torch 1.7) GPU builds.
// Both images share a common prefix, factored out below; only the torch
// version suffix differs.
local image_prefix = "pytorchlightning/pytorch_lightning:base-cuda-py3.7-torch";
[
  pipeline("torch-GPU", image_prefix + "1.6"),
  pipeline("torch-GPU-nightly", image_prefix + "1.7"),
]
45 changes: 35 additions & 10 deletions .github/CODEOWNERS
Validating CODEOWNERS rules …
Expand Up @@ -5,23 +5,48 @@
# the repo. Unless a later match takes precedence,
# @global-owner1 and @global-owner2 will be requested for
# review when someone opens a pull request.
* @williamfalcon @borda @teddykoker @awaelchli @nateraw @justusschock @tchaton @SeanNaren @ananyahjha93
* @williamfalcon @borda @tchaton @SeanNaren @awaelchli @justusschock

# Metrics
/pytorch_lightning/metrics/* @teddykoker @ananyahjha93 @justusschock
/tests/metrics/* @teddykoker @ananyahjha93 @justusschock
/pytorch_lightning/metrics/ @teddykoker @ananyahjha93 @justusschock
/tests/metrics/ @teddykoker @ananyahjha93 @justusschock
/docs/source/metrics.rst @teddykoker @ananyahjha93 @justusschock

# API
/pytorch_lightning/callbacks/base.py @williamfalcon
/pytorch_lightning/core/datamodule.py @williamfalcon
/pytorch_lightning/trainer/trainer.py @williamfalcon
/pytorch_lightning/core/hooks.py @williamfalcon
/pytorch_lightning/core/lightning.py @williamfalcon
/pytorch_lightning/callbacks/base.py @williamfalcon
/pytorch_lightning/core/datamodule.py @williamfalcon
/pytorch_lightning/trainer/trainer.py @williamfalcon @tchaton
/pytorch_lightning/core/hooks.py @williamfalcon
/pytorch_lightning/core/lightning.py @williamfalcon @tchaton
/pytorch_lightning/core/optimizer.py @tchaton
/pytorch_lightning/trainer/training_loop.py @tchaton @SeanNaren
/pytorch_lightning/trainer/evaluation_loop.py @tchaton @SeanNaren

# Connectors
/pytorch_lightning/trainer/connectors/ @tchaton @SeanNaren

# accelerators
/pytorch_lightning/accelerators/* @williamfalcon
/pytorch_lightning/accelerators/ @williamfalcon @tchaton @SeanNaren @awaelchli @justusschock

# owners
/pytorch_lightning/.github/CODEOWNERS @williamfalcon
/.github/CODEOWNERS @williamfalcon
# main
/README.md @williamfalcon @edenlightning
# installation
/setup.py @borda @williamfalcon

# CI/CD
/.github/workflows/ @borda @tchaton
/.github/*.py @borda @tchaton
/dockers/ @borda @tchaton
# configs in root
/*.yml @borda @tchaton

# Docs
/docs/ @edenlightning @tchaton @borda @awaelchli
/.github/*.md @edenlightning @williamfalcon @borda
/.github/ISSUE_TEMPLATE/*.md @edenlightning @borda @tchaton
/docs/source/conf.py @borda @awaelchli

# Testing
/tests/base/boring_model.py @williamfalcon
26 changes: 25 additions & 1 deletion CHANGELOG.md
Expand Up @@ -4,8 +4,14 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

## Unreleased

## [1.1.0rc1] - 2020-12-02
### Fixed

- Fixed `LoggerConnector` to have logged metrics on root device in DP ([#4138](https://github.com/PyTorchLightning/pytorch-lightning/pull/4138))


## [1.1.0rc] - 2020-12-02

### Added

Expand Down Expand Up @@ -50,6 +56,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Added optimizer refactors ([#4658](https://github.com/PyTorchLightning/pytorch-lightning/pull/4658))


- Added `PrecisionRecallCurve, ROC, AveragePrecision` class metric ([#4549](https://github.com/PyTorchLightning/pytorch-lightning/pull/4549))


- Added custom `Apex` and `NativeAMP` as `Precision plugins` ([#4355](https://github.com/PyTorchLightning/pytorch-lightning/pull/4355))


Expand All @@ -72,9 +81,14 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

### Changed

- Removed `multiclass_roc` and `multiclass_precision_recall_curve`, use `roc` and `precision_recall_curve` instead ([#4549](https://github.com/PyTorchLightning/pytorch-lightning/pull/4549))



- Tuner algorithms will be skipped if `fast_dev_run=True` ([#3903](https://github.com/PyTorchLightning/pytorch-lightning/pull/3903))



- WandbLogger does not force wandb `reinit` arg to True anymore and creates a run only when needed ([#4648](https://github.com/PyTorchLightning/pytorch-lightning/pull/4648))


Expand All @@ -89,6 +103,12 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Deprecated `prefix` argument in `ModelCheckpoint` ([#4765](https://github.com/PyTorchLightning/pytorch-lightning/pull/4765))


- Deprecated the old way of assigning hyper-parameters through `self.hparams = ...` ([#4813](https://github.com/PyTorchLightning/pytorch-lightning/pull/4813))


- Deprecated `mode='auto'` from `ModelCheckpoint` and `EarlyStopping` ([#4695](https://github.com/PyTorchLightning/pytorch-lightning/pull/4695))


### Removed


Expand All @@ -97,6 +117,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

- Added feature to move tensors to CPU before saving ([#4309](https://github.com/PyTorchLightning/pytorch-lightning/pull/4309))

- Fixed `LoggerConnector` to have logged metrics on root device in DP ([#4138](https://github.com/PyTorchLightning/pytorch-lightning/pull/4138))


- Auto convert tensors to contiguous format when `gather_all` ([#4907](https://github.com/PyTorchLightning/pytorch-lightning/pull/4907))


## [1.0.8] - 2020-11-24
Expand Down
1 change: 1 addition & 0 deletions MANIFEST.in
Expand Up @@ -54,6 +54,7 @@ include pyproject.toml
# Exclude build configs
exclude *.yml
exclude *.yaml
exclude *.jsonnet

# Exclude pyright config
exclude .pyrightconfig.json
Expand Down
8 changes: 8 additions & 0 deletions docs/source/_images/lightning_icon.svg
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
4 changes: 2 additions & 2 deletions docs/source/hyperparameters.rst
Expand Up @@ -167,8 +167,8 @@ improve readability and reproducibility.
def train_dataloader(self):
return DataLoader(mnist_train, batch_size=self.hparams.batch_size)
.. warning:: Deprecated. This method of assigning hyperparameters to the LightningModule is no longer
recommended and will not be supported in future versions of Lightning.
.. warning:: Deprecated since v1.1.0. This method of assigning hyperparameters to the LightningModule
will no longer be supported from v1.3.0. Use the ``self.save_hyperparameters()`` method from above instead.


4. You can also save full objects such as `dict` or `Namespace` to the checkpoint.
Expand Down
17 changes: 11 additions & 6 deletions docs/source/logging.rst
Expand Up @@ -6,7 +6,7 @@

.. role:: hidden
:class: hidden-section

.. _logging:


Expand Down Expand Up @@ -57,9 +57,11 @@ Logging from a LightningModule

Lightning offers automatic log functionalities for logging scalars, or manual logging for anything else.

Automatic logging
Automatic Logging
=================
Use the :func:`~~pytorch_lightning.core.lightning.LightningModule.log` method to log from anywhere in a :ref:`lightning_module`.
Use the :func:`~~pytorch_lightning.core.lightning.LightningModule.log`
method to log from anywhere in a :ref:`lightning_module` and :ref:`callbacks`
except functions with `batch_start` in their names.

.. code-block:: python
Expand Down Expand Up @@ -95,6 +97,9 @@ The :func:`~~pytorch_lightning.core.lightning.LightningModule.log` method has a
argument of :class:`~pytorch_lightning.callbacks.model_checkpoint.ModelCheckpoint` or in the graphs plotted to the logger of your choice.


If your work requires logging in an unsupported function, please open an issue with a clear description of why it is blocking you.


Manual logging
==============
If you want to log anything that is not a scalar, like histograms, text, images, etc... you may need to use the logger object directly.
Expand Down Expand Up @@ -144,8 +149,8 @@ Use the :func:`~pytorch_lightning.loggers.base.rank_zero_experiment` and :func:`
def experiment(self):
# Return the experiment object associated with this logger.
pass
@property

@property
def version(self):
# Return the experiment version, int or str.
return '0.1'
Expand Down Expand Up @@ -238,7 +243,7 @@ if you are using a logger. These defaults can be customized by overriding the
:func:`~pytorch_lightning.core.lightning.LightningModule.get_progress_bar_dict` hook in your module.

.. code-block:: python
def get_progress_bar_dict(self):
# don't show the version number
items = super().get_progress_bar_dict()
Expand Down
37 changes: 24 additions & 13 deletions docs/source/metrics.rst
Expand Up @@ -239,6 +239,24 @@ ConfusionMatrix
.. autoclass:: pytorch_lightning.metrics.classification.ConfusionMatrix
:noindex:

PrecisionRecallCurve
~~~~~~~~~~~~~~~~~~~~

.. autoclass:: pytorch_lightning.metrics.classification.PrecisionRecallCurve
:noindex:

AveragePrecision
~~~~~~~~~~~~~~~~

.. autoclass:: pytorch_lightning.metrics.classification.AveragePrecision
:noindex:

ROC
~~~

.. autoclass:: pytorch_lightning.metrics.classification.ROC
:noindex:

Regression Metrics
------------------

Expand Down Expand Up @@ -326,7 +344,7 @@ multiclass_auroc [func]
average_precision [func]
~~~~~~~~~~~~~~~~~~~~~~~~

.. autofunction:: pytorch_lightning.metrics.functional.classification.average_precision
.. autofunction:: pytorch_lightning.metrics.functional.average_precision
:noindex:


Expand Down Expand Up @@ -365,10 +383,10 @@ iou [func]
:noindex:


multiclass_roc [func]
roc [func]
~~~~~~~~~~~~~~~~~~~~~

.. autofunction:: pytorch_lightning.metrics.functional.classification.multiclass_roc
.. autofunction:: pytorch_lightning.metrics.functional.roc
:noindex:


Expand All @@ -389,7 +407,7 @@ precision_recall [func]
precision_recall_curve [func]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

.. autofunction:: pytorch_lightning.metrics.functional.classification.precision_recall_curve
.. autofunction:: pytorch_lightning.metrics.functional.precision_recall_curve
:noindex:


Expand All @@ -400,13 +418,6 @@ recall [func]
:noindex:


roc [func]
~~~~~~~~~~

.. autofunction:: pytorch_lightning.metrics.functional.classification.roc
:noindex:


stat_scores [func]
~~~~~~~~~~~~~~~~~~

Expand All @@ -424,14 +435,14 @@ stat_scores_multiple_classes [func]
to_categorical [func]
~~~~~~~~~~~~~~~~~~~~~

.. autofunction:: pytorch_lightning.metrics.functional.classification.to_categorical
.. autofunction:: pytorch_lightning.metrics.utils.to_categorical
:noindex:


to_onehot [func]
~~~~~~~~~~~~~~~~

.. autofunction:: pytorch_lightning.metrics.functional.classification.to_onehot
.. autofunction:: pytorch_lightning.metrics.utils.to_onehot
:noindex:


Expand Down

0 comments on commit 14480c7

Please sign in to comment.