|
11 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
12 | 12 | # See the License for the specific language governing permissions and |
13 | 13 | # limitations under the License. |
14 | | -from contextlib import contextmanager |
15 | | -from typing import Any, Callable, Generator, Mapping, Optional, Set, Type, Union |
| 14 | +from typing import Mapping, Optional, Union |
16 | 15 |
|
17 | 16 | from lightning_utilities.core.imports import module_available |
18 | 17 | from torch import Tensor |
19 | 18 | from torch.nn import Module, Parameter |
20 | 19 |
|
21 | | -from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation |
22 | | - |
23 | | - |
def is_meta_init() -> bool:
    """Deprecated stub kept for backward compatibility; always returns ``False``.

    Deprecated in v1.8, scheduled for removal in v1.9. The original functionality
    was dropped; users are pointed at the ``torchdistx`` project instead.
    """
    message = (
        "`pytorch_lightning.utilities.meta.is_meta_init` is deprecated in v1.8 and will be removed in v1.9."
        " The function has become a no-op."
        " Please check out the `torchdistx` project instead: https://github.com/pytorch/torchdistx"
    )
    rank_zero_deprecation(message)
    return False
31 | | - |
32 | | - |
def init_meta(module_fn: Callable[..., Module], *args: Any, **kwargs: Any) -> None:
    """Deprecated no-op kept so existing callers do not break.

    Deprecated in v1.8, scheduled for removal in v1.9; the arguments are ignored.
    Users are pointed at the ``torchdistx`` project instead.
    """
    message = (
        "`pytorch_lightning.utilities.meta.init_meta` is deprecated in v1.8 and will be removed in v1.9."
        " The function has become a no-op."
        " Please check out the `torchdistx` project instead: https://github.com/pytorch/torchdistx"
    )
    rank_zero_deprecation(message)
39 | | - |
40 | | - |
def get_all_subclasses(cls: Type) -> Set[Type]:
    """Deprecated forwarder to ``lightning_utilities``' implementation.

    Deprecated in v1.8, scheduled for removal in v1.9. Still returns the set of
    all (transitive) subclasses of ``cls`` by delegating to the new location.
    """
    rank_zero_deprecation(
        "`pytorch_lightning.utilities.meta.get_all_subclasses` is deprecated in v1.8 and will be removed in v1.9."
        " Please copy its implementation if you have a use for it."
    )
    # Imported lazily so the deprecation warning fires before any import error could.
    from lightning_utilities.core.inheritance import get_all_subclasses as _get_all_subclasses

    return _get_all_subclasses(cls)
49 | | - |
50 | | - |
def recursively_setattr(root_module: Any, prefix: str, materialized_module: Module) -> None:
    """Replace the submodule addressed by the dotted path ``prefix`` with ``materialized_module``.

    Deprecated in v1.8, scheduled for removal in v1.9. Walks every dotted segment of
    ``prefix`` except the last via ``getattr``; the final segment is either used as an
    integer index (e.g. into an ``nn.Sequential``) or as a plain attribute name.
    """
    rank_zero_deprecation(
        "`pytorch_lightning.utilities.meta.recursively_setattr` is deprecated in v1.8 and will be removed in v1.9."
        " Please copy its implementation if you have a use for it."
    )
    parts = prefix.split(".")
    attr_name = parts[-1]
    parent = root_module
    for segment in parts[:-1]:
        parent = getattr(parent, segment)

    try:
        # A numeric last segment means "assign by index" (container-style modules).
        parent[int(attr_name)] = materialized_module
    except ValueError:
        setattr(parent, attr_name, materialized_module)
65 | | - |
66 | | - |
def materialize_module(root_module: Module) -> None:
    """Deprecated no-op; ``root_module`` is left untouched.

    Deprecated in v1.8, scheduled for removal in v1.9. Users are pointed at the
    ``torchdistx`` project instead.
    """
    message = (
        "`pytorch_lightning.utilities.meta.materialize_module` is deprecated in v1.8 and will be removed in v1.9."
        " The function has become a no-op."
        " Please check out the `torchdistx` project instead: https://github.com/pytorch/torchdistx"
    )
    rank_zero_deprecation(message)
73 | | - |
74 | | - |
@contextmanager
def init_meta_context() -> Generator:
    """Deprecated context manager that does nothing besides warning once.

    Deprecated in v1.8, scheduled for removal in v1.9. Yields control immediately;
    users are pointed at the ``torchdistx`` project instead.
    """
    message = (
        "`pytorch_lightning.utilities.meta.init_meta_context` is deprecated in v1.8 and will be removed in v1.9."
        " The function has become a no-op."
        " Please check out the `torchdistx` project instead: https://github.com/pytorch/torchdistx"
    )
    rank_zero_deprecation(message)
    yield
83 | | - |
84 | | - |
def is_on_meta_device(module: Module) -> bool:
    """Return whether ``module``'s first parameter lives on the meta device.

    Deprecated in v1.8, scheduled for removal in v1.9. Only the first parameter is
    inspected; a module with no parameters at all reports ``False``.
    """
    rank_zero_deprecation(
        "`pytorch_lightning.utilities.meta.is_on_meta_device` is deprecated in v1.8 and will be removed in v1.9."
        " Please copy its implementation if you have a use for it."
    )
    # Equivalent to next(module.parameters()) guarded by StopIteration: decide on
    # the first parameter if one exists, otherwise fall through to False.
    for parameter in module.parameters():
        return parameter.is_meta
    return False
95 | | - |
96 | 20 |
|
97 | 21 | def _is_deferred(module: Optional[Module]) -> bool: |
98 | 22 | if module is None or not module_available("torchdistx.fake"): |
|
0 commit comments