From 45820d9a38d257bcfe37b9c53d5964f5e78c8a90 Mon Sep 17 00:00:00 2001
From: Nikhil Shenoy
Date: Wed, 4 Jan 2023 14:14:21 -0800
Subject: [PATCH 01/11] fixed transformers, callbacks and quantization link

---
 docs/source-pytorch/ecosystem/transformers.rst  | 2 +-
 docs/source-pytorch/extensions/callbacks.rst    | 8 ++++----
 src/pytorch_lightning/callbacks/quantization.py | 3 ++-
 3 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/docs/source-pytorch/ecosystem/transformers.rst b/docs/source-pytorch/ecosystem/transformers.rst
index b20402a52699d..d49e4ba54e598 100644
--- a/docs/source-pytorch/ecosystem/transformers.rst
+++ b/docs/source-pytorch/ecosystem/transformers.rst
@@ -16,7 +16,7 @@ In Lightning Transformers, we offer the following benefits:
 - Backed by `HuggingFace Transformers `_ models and datasets, spanning multiple modalities and tasks within NLP/Audio and Vision.
 - Task Abstraction for Rapid Research & Experimentation - Build your own custom transformer tasks across all modalities with little friction.
 - Powerful config composition backed by `Hydra `_ - simply swap out models, optimizers, schedulers task, and many more configurations without touching the code.
-- Seamless Memory and Speed Optimizations - Out-of-the-box training optimizations such as `DeepSpeed ZeRO `_ or `FairScale Sharded Training `_ with no code changes.
+- Seamless Memory and Speed Optimizations - Out-of-the-box training optimizations such as `DeepSpeed ZeRO `_ or `FairScale Sharded Training `_ with no code changes.

 -----------------

diff --git a/docs/source-pytorch/extensions/callbacks.rst b/docs/source-pytorch/extensions/callbacks.rst
index 401b9606258b6..9292c295db751 100644
--- a/docs/source-pytorch/extensions/callbacks.rst
+++ b/docs/source-pytorch/extensions/callbacks.rst
@@ -61,10 +61,10 @@ Examples
 ********
 You can do pretty much anything with callbacks.

-- `Add a MLP to fine-tune self-supervised networks `_.
-- `Find how to modify an image input to trick the classification result `_.
-- `Interpolate the latent space of any variational model `_.
-- `Log images to Tensorboard for any model `_.
+- `Add a MLP to fine-tune self-supervised networks `_.
+- `Find how to modify an image input to trick the classification result `_.
+- `Interpolate the latent space of any variational model `_.
+- `Log images to Tensorboard for any model `_.

 --------------

diff --git a/src/pytorch_lightning/callbacks/quantization.py b/src/pytorch_lightning/callbacks/quantization.py
index 2e37660d2d58f..4eed24c8a843f 100644
--- a/src/pytorch_lightning/callbacks/quantization.py
+++ b/src/pytorch_lightning/callbacks/quantization.py
@@ -151,7 +151,8 @@ def custom_trigger_last(trainer):
         not be controlled by the callback.

     .. _PyTorch Quantization: https://pytorch.org/docs/stable/quantization.html#quantization-aware-training
-    .. _torch.quantization.QConfig: https://pytorch.org/docs/stable/torch.quantization.html#torch.quantization.QConfig
+    .. _torch.quantization.QConfig:
+        https://pytorch.org/docs/stable/generated/torch.quantization.qconfig.QConfig.html#qconfig
     """

     OBSERVER_TYPES = ("histogram", "average")

From 93a94f4c07722451659ccc4a334366e80215fcba Mon Sep 17 00:00:00 2001
From: Nikhil Shenoy
Date: Wed, 4 Jan 2023 18:59:16 -0800
Subject: [PATCH 02/11] Updated the link for the on-line training paper

---
 docs/source-pytorch/common/lightning_module.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source-pytorch/common/lightning_module.rst b/docs/source-pytorch/common/lightning_module.rst
index 690b0209c4cf6..e1b9b38f4f808 100644
--- a/docs/source-pytorch/common/lightning_module.rst
+++ b/docs/source-pytorch/common/lightning_module.rst
@@ -1210,7 +1210,7 @@ and the Trainer will apply Truncated Backprop to it.

 (`Williams et al. "An efficient gradient-based algorithm for on-line training of
 recurrent network trajectories."
-`_)
+`_)

 `Tutorial `_

From 7ad0b453f0c6f7a7087512f4cd0cbd7d78b60b7c Mon Sep 17 00:00:00 2001
From: Nikhil Shenoy
Date: Wed, 4 Jan 2023 19:56:59 -0800
Subject: [PATCH 03/11] Updated efficient data batching link

---
 src/pytorch_lightning/strategies/ipu.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/pytorch_lightning/strategies/ipu.py b/src/pytorch_lightning/strategies/ipu.py
index 72ac9ba3f1634..61da2ec78d2e5 100644
--- a/src/pytorch_lightning/strategies/ipu.py
+++ b/src/pytorch_lightning/strategies/ipu.py
@@ -64,7 +64,7 @@ def __init__(
             device_iterations: Number of iterations to run on device at once before returning to host.
                 This can be used as an optimization to speed up training.
-                https://docs.graphcore.ai/projects/poptorch-user-guide/en/0.1.67/batching.html
+                https://docs.graphcore.ai/projects/poptorch-user-guide/en/latest/batching.html
             autoreport: Enable auto-reporting for IPUs using PopVision
                 https://docs.graphcore.ai/projects/graphcore-popvision-user-guide/en/latest/graph/graph.html
             autoreport_dir: Optional directory to store autoReport output.

From 38c0dc8ffdec6d988bc28556e0e7910e4a874944 Mon Sep 17 00:00:00 2001
From: Nikhil Shenoy
Date: Wed, 4 Jan 2023 20:12:52 -0800
Subject: [PATCH 04/11] Updated graph analyser opening reports link

---
 docs/source-pytorch/accelerators/ipu_intermediate.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source-pytorch/accelerators/ipu_intermediate.rst b/docs/source-pytorch/accelerators/ipu_intermediate.rst
index 68c866ea96644..806ddd0ce32f9 100644
--- a/docs/source-pytorch/accelerators/ipu_intermediate.rst
+++ b/docs/source-pytorch/accelerators/ipu_intermediate.rst
@@ -60,4 +60,4 @@ Lightning supports dumping all reports to a directory to open using the tool.

     trainer = pl.Trainer(accelerator="ipu", devices=8, strategy=IPUStrategy(autoreport_dir="report_dir/"))
     trainer.fit(model)

-This will dump all reports to ``report_dir/`` which can then be opened using the Graph Analyser Tool, see `Opening Reports `__.
+This will dump all reports to ``report_dir/`` which can then be opened using the Graph Analyser Tool, see `Opening Reports `__.
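Patches 03 and 04 both touch ``IPUStrategy`` options: ``device_iterations`` (batching optimization) and the auto-reporting pair ``autoreport``/``autoreport_dir`` (PopVision Graph Analyser output). A minimal sketch of how they combine, assuming an IPU machine with PopTorch available — the numeric values here are illustrative, not recommendations:

.. code-block:: python

    import pytorch_lightning as pl
    from pytorch_lightning.strategies import IPUStrategy

    # Run 32 iterations on device per host round-trip (a throughput
    # optimization) and dump PopVision reports for the Graph Analyser.
    strategy = IPUStrategy(device_iterations=32, autoreport=True, autoreport_dir="report_dir/")
    trainer = pl.Trainer(accelerator="ipu", devices=8, strategy=strategy)
    trainer.fit(model)  # ``model`` is any LightningModule, as in the docs snippet above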
From c3a812f9aee388cb13f99afdcceafb260db15373 Mon Sep 17 00:00:00 2001 From: Nikhil Shenoy Date: Wed, 4 Jan 2023 20:19:25 -0800 Subject: [PATCH 05/11] fixed deepspeed offload documentation --- docs/source-pytorch/advanced/model_parallel.rst | 2 +- docs/source-pytorch/fabric/fabric.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source-pytorch/advanced/model_parallel.rst b/docs/source-pytorch/advanced/model_parallel.rst index db5605619ad43..c1e68b780f186 100644 --- a/docs/source-pytorch/advanced/model_parallel.rst +++ b/docs/source-pytorch/advanced/model_parallel.rst @@ -614,7 +614,7 @@ DeepSpeed ZeRO Stage 3 ====================== DeepSpeed ZeRO Stage 3 shards the optimizer states, gradients and the model parameters (also optionally activations). Sharding model parameters and activations comes with an increase in distributed communication, however allows you to scale your models massively from one GPU to multiple GPUs. -**The DeepSpeed team report the ability to fine-tune models with over 40B parameters on a single GPU and over 2 Trillion parameters on 512 GPUs.** For more information we suggest checking the `DeepSpeed ZeRO-3 Offload documentation `__. +**The DeepSpeed team report the ability to fine-tune models with over 40B parameters on a single GPU and over 2 Trillion parameters on 512 GPUs.** For more information we suggest checking the `DeepSpeed ZeRO-3 Offload documentation `__. We've ran benchmarks for all these features and given a simple example of how all these features work in Lightning, which you can see at `minGPT `_. diff --git a/docs/source-pytorch/fabric/fabric.rst b/docs/source-pytorch/fabric/fabric.rst index 02334579b993f..aaa1f8b52e75d 100644 --- a/docs/source-pytorch/fabric/fabric.rst +++ b/docs/source-pytorch/fabric/fabric.rst @@ -110,7 +110,7 @@ DDP with 8 GPUs and `torch.bfloat16 `_ with mixed precision: +`DeepSpeed Zero3 `_ with mixed precision: .. code-block:: bash From ec72c8ef949faeec6789eee606a96d9963468aaa Mon Sep 17 00:00:00 2001 From: Nikhil Shenoy Date: Wed, 4 Jan 2023 23:48:20 -0800 Subject: [PATCH 06/11] Updated ModelPruning Callback remove pytorch link --- src/pytorch_lightning/callbacks/pruning.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pytorch_lightning/callbacks/pruning.py b/src/pytorch_lightning/callbacks/pruning.py index ad5f8776c56b4..2005457418d0d 100644 --- a/src/pytorch_lightning/callbacks/pruning.py +++ b/src/pytorch_lightning/callbacks/pruning.py @@ -261,7 +261,7 @@ def _wrap_pruning_fn(pruning_fn: Callable, **kwargs: Any) -> Callable: def make_pruning_permanent(self, module: nn.Module) -> None: """Removes pruning buffers from any pruned modules. 
- Adapted from https://github.com/pytorch/pytorch/blob/1.7.1/torch/nn/utils/prune.py#L1176-L1180 + Adapted from https://github.com/pytorch/pytorch/blob/master/torch/nn/utils/prune.py#L1172-L1200 """ for _, module in module.named_modules(): for k in list(module._forward_pre_hooks): From 1d245721546a1a19a1c17b31a53c0371d21a0235 Mon Sep 17 00:00:00 2001 From: Nikhil Shenoy Date: Thu, 5 Jan 2023 13:10:06 -0800 Subject: [PATCH 07/11] Updated relative links with absolute links --- docs/source-pytorch/common/hyperparameters.rst | 2 +- docs/source-pytorch/index.rst | 2 +- docs/source-pytorch/starter/introduction.rst | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/source-pytorch/common/hyperparameters.rst b/docs/source-pytorch/common/hyperparameters.rst index b5d9b509a8208..09a185271357d 100644 --- a/docs/source-pytorch/common/hyperparameters.rst +++ b/docs/source-pytorch/common/hyperparameters.rst @@ -12,7 +12,7 @@ Configure hyperparameters from the CLI (legacy) .. warning:: This is the documentation for the use of Python's ``argparse`` to implement a CLI. This approach is no longer - recommended, and people are encouraged to use the new `LightningCLI <../cli/lightning_cli.html>`_ class instead. + recommended, and people are encouraged to use the new `LightningCLI `_ class instead. Lightning has utilities to interact seamlessly with the command line ``ArgumentParser`` diff --git a/docs/source-pytorch/index.rst b/docs/source-pytorch/index.rst index 5c28d1b057c54..bc4760fe53bbb 100644 --- a/docs/source-pytorch/index.rst +++ b/docs/source-pytorch/index.rst @@ -62,7 +62,7 @@ Conda users -Or read the `advanced install guide `_ +Or read the `advanced install guide `_ We are fully compatible with any stable PyTorch version v1.10 and above. diff --git a/docs/source-pytorch/starter/introduction.rst b/docs/source-pytorch/starter/introduction.rst index 7942074df4165..49ae89e43d4a1 100644 --- a/docs/source-pytorch/starter/introduction.rst +++ b/docs/source-pytorch/starter/introduction.rst @@ -99,7 +99,7 @@ For `conda `_ users -Or read the `advanced install guide `_ +Or read the `advanced install guide `_ ---- @@ -179,7 +179,7 @@ The Lightning :doc:`Trainer <../common/trainer>` "mixes" any :doc:`LightningModu trainer = pl.Trainer(limit_train_batches=100, max_epochs=1) trainer.fit(model=autoencoder, train_dataloaders=train_loader) -The Lightning :doc:`Trainer <../common/trainer>` automates `40+ tricks <../common/trainer.html#trainer-flags>`_ including: +The Lightning :doc:`Trainer <../common/trainer>` automates `40+ tricks `_ including: * Epoch and batch iteration * ``optimizer.step()``, ``loss.backward()``, ``optimizer.zero_grad()`` calls From 8991a7735fac43af59c7a0ca0020b74a0ba5d983 Mon Sep 17 00:00:00 2001 From: Nikhil Shenoy Date: Thu, 5 Jan 2023 15:43:14 -0800 Subject: [PATCH 08/11] Fixed model pruning link --- src/pytorch_lightning/callbacks/pruning.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pytorch_lightning/callbacks/pruning.py b/src/pytorch_lightning/callbacks/pruning.py index 2005457418d0d..d2d0fea396fb7 100644 --- a/src/pytorch_lightning/callbacks/pruning.py +++ b/src/pytorch_lightning/callbacks/pruning.py @@ -261,7 +261,7 @@ def _wrap_pruning_fn(pruning_fn: Callable, **kwargs: Any) -> Callable: def make_pruning_permanent(self, module: nn.Module) -> None: """Removes pruning buffers from any pruned modules. 
- Adapted from https://github.com/pytorch/pytorch/blob/master/torch/nn/utils/prune.py#L1172-L1200 + Adapted from https://github.com/pytorch/pytorch/blob/v1.7.1/torch/nn/utils/prune.py#L1118-L1122 """ for _, module in module.named_modules(): for k in list(module._forward_pre_hooks): From bf9e8cbe83892894992e2d31beb72d9f6acc1395 Mon Sep 17 00:00:00 2001 From: Nikhil Shenoy Date: Fri, 6 Jan 2023 15:20:45 -0800 Subject: [PATCH 09/11] Reset relative links and updated conf.py with ignore --- docs/source-pytorch/conf.py | 3 +++ docs/source-pytorch/index.rst | 2 +- docs/source-pytorch/starter/introduction.rst | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/source-pytorch/conf.py b/docs/source-pytorch/conf.py index 5d95d62e65569..3934b5a482c65 100644 --- a/docs/source-pytorch/conf.py +++ b/docs/source-pytorch/conf.py @@ -412,3 +412,6 @@ def package_list_from_file(file): # ignore all links in any CHANGELOG file linkcheck_exclude_documents = [r"^(.*\/)*CHANGELOG.*$"] + +# ignore the following relative links +linkcheck_ignore = [r"^starter/installation.html$", r"^installation.html$"] diff --git a/docs/source-pytorch/index.rst b/docs/source-pytorch/index.rst index bc4760fe53bbb..5c28d1b057c54 100644 --- a/docs/source-pytorch/index.rst +++ b/docs/source-pytorch/index.rst @@ -62,7 +62,7 @@ Conda users -Or read the `advanced install guide `_ +Or read the `advanced install guide `_ We are fully compatible with any stable PyTorch version v1.10 and above. diff --git a/docs/source-pytorch/starter/introduction.rst b/docs/source-pytorch/starter/introduction.rst index 49ae89e43d4a1..10d890ce199b3 100644 --- a/docs/source-pytorch/starter/introduction.rst +++ b/docs/source-pytorch/starter/introduction.rst @@ -99,7 +99,7 @@ For `conda `_ users -Or read the `advanced install guide `_ +Or read the `advanced install guide `_ ---- From b1c477166ef699e09762ad5adb253abf83a1afd8 Mon Sep 17 00:00:00 2001 From: Nikhil Shenoy Date: Fri, 6 Jan 2023 15:34:19 -0800 Subject: [PATCH 10/11] Set other relative links to old format, and added them to linkcheck_ignore --- docs/source-pytorch/common/hyperparameters.rst | 2 +- docs/source-pytorch/conf.py | 7 ++++++- docs/source-pytorch/starter/introduction.rst | 2 +- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/docs/source-pytorch/common/hyperparameters.rst b/docs/source-pytorch/common/hyperparameters.rst index 09a185271357d..b5d9b509a8208 100644 --- a/docs/source-pytorch/common/hyperparameters.rst +++ b/docs/source-pytorch/common/hyperparameters.rst @@ -12,7 +12,7 @@ Configure hyperparameters from the CLI (legacy) .. warning:: This is the documentation for the use of Python's ``argparse`` to implement a CLI. This approach is no longer - recommended, and people are encouraged to use the new `LightningCLI `_ class instead. + recommended, and people are encouraged to use the new `LightningCLI <../cli/lightning_cli.html>`_ class instead. 
Lightning has utilities to interact seamlessly with the command line ``ArgumentParser`` diff --git a/docs/source-pytorch/conf.py b/docs/source-pytorch/conf.py index 3934b5a482c65..27781b80a9720 100644 --- a/docs/source-pytorch/conf.py +++ b/docs/source-pytorch/conf.py @@ -414,4 +414,9 @@ def package_list_from_file(file): linkcheck_exclude_documents = [r"^(.*\/)*CHANGELOG.*$"] # ignore the following relative links -linkcheck_ignore = [r"^starter/installation.html$", r"^installation.html$"] +linkcheck_ignore = [ + r"^starter/installation.html$", + r"^installation.html$", + r"^../cli/lightning_cli.html$", + r"^../common/trainer.html#trainer-flags$", +] diff --git a/docs/source-pytorch/starter/introduction.rst b/docs/source-pytorch/starter/introduction.rst index 10d890ce199b3..7942074df4165 100644 --- a/docs/source-pytorch/starter/introduction.rst +++ b/docs/source-pytorch/starter/introduction.rst @@ -179,7 +179,7 @@ The Lightning :doc:`Trainer <../common/trainer>` "mixes" any :doc:`LightningModu trainer = pl.Trainer(limit_train_batches=100, max_epochs=1) trainer.fit(model=autoencoder, train_dataloaders=train_loader) -The Lightning :doc:`Trainer <../common/trainer>` automates `40+ tricks `_ including: +The Lightning :doc:`Trainer <../common/trainer>` automates `40+ tricks <../common/trainer.html#trainer-flags>`_ including: * Epoch and batch iteration * ``optimizer.step()``, ``loss.backward()``, ``optimizer.zero_grad()`` calls From ac17ad86c9f080ae464b3951a01954bd0b3447fb Mon Sep 17 00:00:00 2001 From: Nikhil Shenoy Date: Mon, 9 Jan 2023 09:26:26 -0800 Subject: [PATCH 11/11] Updated with why we are ignoring certain links --- docs/source-pytorch/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source-pytorch/conf.py b/docs/source-pytorch/conf.py index 27781b80a9720..a6393ba9b2d06 100644 --- a/docs/source-pytorch/conf.py +++ b/docs/source-pytorch/conf.py @@ -413,7 +413,7 @@ def package_list_from_file(file): # ignore all links in any CHANGELOG file linkcheck_exclude_documents = [r"^(.*\/)*CHANGELOG.*$"] -# ignore the following relative links +# ignore the following relative links (false positive errors during linkcheck) linkcheck_ignore = [ r"^starter/installation.html$", r"^installation.html$",
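The ignore patterns added across patches 09-11 are plain Python regular expressions; Sphinx's linkcheck builder skips any URI they match. A quick standalone sanity check — a sketch assuming ``re.match`` semantics for ``linkcheck_ignore``, which is how Sphinx documents the option behaving:

.. code-block:: python

    import re

    # The relative links deliberately left un-checked in conf.py.
    linkcheck_ignore = [
        r"^starter/installation.html$",
        r"^installation.html$",
        r"^../cli/lightning_cli.html$",
        r"^../common/trainer.html#trainer-flags$",
    ]

    # Each relative link kept in the docs should be matched by one pattern.
    for uri in ("starter/installation.html", "../cli/lightning_cli.html"):
        assert any(re.match(pattern, uri) for pattern in linkcheck_ignore)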