
Commit b6ebfa6

Author: Baizhou Zhang
Commit message: remove Example: in code docstrings
1 parent 86f9bc9 · commit b6ebfa6

6 files changed: +0 -14 lines

colossalai/booster/plugin/gemini_plugin.py

Lines changed: 0 additions & 2 deletions
@@ -229,8 +229,6 @@ class GeminiPlugin(DPPluginBase):
     """
     Plugin for Gemini.

-    Example:
-
     ```python
     from colossalai.booster import Booster
     from colossalai.booster.plugin import GeminiPlugin
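The hunk truncates the docstring snippet right after its imports. For reference, the Booster flow such snippets demonstrate looks roughly like the sketch below; the model, optimizer, and launch details are illustrative assumptions, not the exact contents of the file:

```python
import torch
import colossalai
from colossalai.booster import Booster
from colossalai.booster.plugin import GeminiPlugin
from colossalai.nn.optimizer import HybridAdam

# Assumes the script is started with `colossalai run` / torchrun so that the
# distributed environment variables are already set before launching.
colossalai.launch_from_torch(config={})

# Stand-in training objects, purely for illustration.
model = torch.nn.Linear(16, 16)
optimizer = HybridAdam(model.parameters(), lr=1e-3)
criterion = torch.nn.MSELoss()

# The plugin is handed to a Booster, which wraps model/optimizer/criterion.
plugin = GeminiPlugin()
booster = Booster(plugin=plugin)
model, optimizer, criterion, _, _ = booster.boost(model, optimizer, criterion)
```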

colossalai/booster/plugin/hybrid_parallel_plugin.py

Lines changed: 0 additions & 2 deletions
@@ -266,8 +266,6 @@ class HybridParallelPlugin(PipelinePluginBase):
     Tensor parallel, pipeline parallel and data parallel(DDP/ZeRO) can be picked and combined in this plugin.
     The size of tp and pp should be passed in by user, then the size of dp is automatically calculated from dp_size = world_size / (tp_size * pp_size).

-    Example:
-
     ```python
     from colossalai.booster import Booster
     from colossalai.booster.plugin import HybridParallelPlugin
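The sizing rule quoted in this docstring, dp_size = world_size / (tp_size * pp_size), is easiest to see with concrete numbers. A minimal sketch assuming an already-launched 8-rank job (the sizes below are example values of mine, not taken from the diff):

```python
from colossalai.booster import Booster
from colossalai.booster.plugin import HybridParallelPlugin

# Assumes colossalai.launch_from_torch(...) has already initialized an
# 8-process group, i.e. world_size == 8.
# With tp_size=2 and pp_size=2 the plugin derives dp_size = 8 / (2 * 2) = 2.
plugin = HybridParallelPlugin(tp_size=2, pp_size=2)

# The rest of the flow is the same Booster pattern as for the other plugins.
booster = Booster(plugin=plugin)
```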

colossalai/booster/plugin/low_level_zero_plugin.py

Lines changed: 0 additions & 2 deletions
@@ -213,8 +213,6 @@ class LowLevelZeroPlugin(DPPluginBase):
     """
     Plugin for low level zero.

-    Example:
-
     ```python
     from colossalai.booster import Booster
     from colossalai.booster.plugin import LowLevelZeroPlugin

colossalai/booster/plugin/torch_ddp_plugin.py

Lines changed: 0 additions & 2 deletions
@@ -130,8 +130,6 @@ class TorchDDPPlugin(DPPluginBase):
     """
     Plugin for PyTorch DDP.

-    Example:
-
     ```python
     from colossalai.booster import Booster
     from colossalai.booster.plugin import TorchDDPPlugin

colossalai/booster/plugin/torch_fsdp_plugin.py

Lines changed: 0 additions & 2 deletions
@@ -143,8 +143,6 @@ class TorchFSDPPlugin(DPPluginBase):
     """
     Plugin for PyTorch FSDP.

-    Example:
-
     ```python
     from colossalai.booster import Booster
     from colossalai.booster.plugin import TorchFSDPPlugin

colossalai/cluster/dist_coordinator.py

Lines changed: 0 additions & 4 deletions
@@ -20,7 +20,6 @@ class in the whole program.
     - master: the process with rank 0
     - node master: the process with local rank 0 on the current node

-    Example:

     ```python
     from colossalai.cluster.dist_coordinator import DistCoordinator
@@ -134,7 +133,6 @@ def priority_execution(self, executor_rank: int = 0, process_group: ProcessGroup
         other processes in the same process group. This is often useful when downloading is required
         as we only want to download in one process to prevent file corruption.

-        Example:

         ```python
         from colossalai.cluster import DistCoordinator
@@ -180,8 +178,6 @@ def on_master_only(self, process_group: ProcessGroup = None):
         """
         A function wrapper that only executes the wrapped function on the master process (rank 0).

-        Example:
-
         ```python
         from colossalai.cluster import DistCoordinator
         dist_coordinator = DistCoordinator()
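The three hunks above all touch DistCoordinator docstrings. A compact sketch of how the two helpers mentioned there are typically used; the bodies below (a placeholder download step and a logging function) are illustrative additions, not code from the file:

```python
from colossalai.cluster import DistCoordinator

# Assumes torch.distributed is already initialized
# (e.g. via colossalai.launch_from_torch).
coordinator = DistCoordinator()

# priority_execution(): the executor rank (rank 0 by default) runs the block
# first while the other ranks in the process group wait, so that e.g. a
# dataset is downloaded exactly once and never corrupted by concurrent writes.
with coordinator.priority_execution():
    pass  # placeholder: download a dataset or checkpoint here

# on_master_only(): the wrapped function only runs on the master process
# (rank 0) and is a no-op on every other rank.
@coordinator.on_master_only()
def log(msg: str):
    print(msg)

log("printed once, by rank 0 only")
```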
