6565
6666
6767class _Connector :
68- """The Connector parses several Lite arguments and instantiates the Strategy including its owned components.
68+ """The Connector parses several Fabric arguments and instantiates the Strategy including its owned components.
6969
7070 A. accelerator flag could be:
7171 1. accelerator class
@@ -297,7 +297,7 @@ def _check_device_config_and_set_final_flags(
297297 else self ._accelerator_flag
298298 )
299299 raise ValueError (
300- f"`Lite (devices={ self ._devices_flag !r} )` value is not a valid input"
300+ f"`Fabric (devices={ self ._devices_flag !r} )` value is not a valid input"
301301 f" using { accelerator_name } accelerator."
302302 )
303303
@@ -345,7 +345,7 @@ def _set_parallel_devices_and_init_accelerator(self) -> None:
345345 f"`{ accelerator_cls .__qualname__ } ` can not run on your system"
346346 " since the accelerator is not available. The following accelerator(s)"
347347 " is available and can be passed into `accelerator` argument of"
348- f" `Lite `: { available_accelerator } ."
348+ f" `Fabric `: { available_accelerator } ."
349349 )
350350
351351 self ._set_devices_flag_if_auto_passed ()
@@ -416,14 +416,14 @@ def _check_strategy_and_fallback(self) -> None:
416416 strategy_flag = "ddp"
417417 if strategy_flag in _DDP_FORK_ALIASES and "fork" not in torch .multiprocessing .get_all_start_methods ():
418418 raise ValueError (
419- f"You selected `Lite (strategy='{ strategy_flag } ')` but process forking is not supported on this"
420- f" platform. We recommed `Lite (strategy='ddp_spawn')` instead."
419+ f"You selected `Fabric (strategy='{ strategy_flag } ')` but process forking is not supported on this"
420+ f" platform. We recommend `Fabric (strategy='ddp_spawn')` instead."
421421 )
422422 if (
423423 strategy_flag in _FSDP_ALIASES or isinstance (self ._strategy_flag , FSDPStrategy )
424424 ) and self ._accelerator_flag not in ("cuda" , "gpu" ):
425425 raise ValueError (
426- "You selected the FSDP strategy but FSDP is only available on GPU. Set `Lite (accelerator='gpu', ...)`"
426+ "You selected the FSDP strategy but FSDP is only available on GPU. Set `Fabric (accelerator='gpu', ...)`"
427427 " to continue or select a different strategy."
428428 )
429429 if strategy_flag :
@@ -449,7 +449,7 @@ def _check_and_init_precision(self) -> Precision:
449449 elif self ._precision_input in (16 , "bf16" ):
450450 if self ._precision_input == 16 :
451451 rank_zero_warn (
452- "You passed `Lite (accelerator='tpu', precision=16)` but AMP"
452+ "You passed `Fabric (accelerator='tpu', precision=16)` but AMP"
453453 " is not supported with TPUs. Using `precision='bf16'` instead."
454454 )
455455 return TPUBf16Precision ()
@@ -463,7 +463,7 @@ def _check_and_init_precision(self) -> Precision:
463463
464464 if self ._precision_input == 16 and self ._accelerator_flag == "cpu" :
465465 rank_zero_warn (
466- "You passed `Lite (accelerator='cpu', precision=16)` but native AMP is not supported on CPU."
466+ "You passed `Fabric (accelerator='cpu', precision=16)` but native AMP is not supported on CPU."
467467 " Using `precision='bf16'` instead."
468468 )
469469 self ._precision_input = "bf16"
@@ -487,7 +487,7 @@ def _validate_precision_choice(self) -> None:
487487 if isinstance (self .accelerator , TPUAccelerator ):
488488 if self ._precision_input == 64 :
489489 raise NotImplementedError (
490- "`Lite (accelerator='tpu', precision=64)` is not implemented."
490+ "`Fabric (accelerator='tpu', precision=64)` is not implemented."
491491 " Please, open an issue in `https://github.com/Lightning-AI/lightning/issues`"
492492 " requesting this feature."
493493 )
@@ -519,10 +519,10 @@ def _lazy_init_strategy(self) -> None:
519519
520520 if _IS_INTERACTIVE and self .strategy .launcher and not self .strategy .launcher .is_interactive_compatible :
521521 raise RuntimeError (
522- f"`Lite (strategy={ self ._strategy_flag !r} )` is not compatible with an interactive"
522+ f"`Fabric (strategy={ self ._strategy_flag !r} )` is not compatible with an interactive"
523523 " environment. Run your code as a script, or choose one of the compatible strategies:"
524- f" Lite (strategy=None|{ '|' .join (_StrategyType .interactive_compatible_types ())} )."
525- " In case you are spawning processes yourself, make sure to include the Lite "
524+ f" Fabric (strategy=None|{ '|' .join (_StrategyType .interactive_compatible_types ())} )."
525+ " In case you are spawning processes yourself, make sure to include the Fabric "
526526 " creation inside the worker function."
527527 )
528528
@@ -549,9 +549,9 @@ def _argument_from_env(name: str, current: Any, default: Any) -> Any:
549549
550550 if env_value is not None and env_value != current and current != default :
551551 raise ValueError (
552- f"Your code has `LightningLite ({ name } ={ current !r} , ...)` but it conflicts with the value "
552+ f"Your code has `Fabric ({ name } ={ current !r} , ...)` but it conflicts with the value "
553553 f"`--{ name } ={ current } ` set through the CLI. "
554- " Remove it either from the CLI or from the Lightning Lite object."
554+ " Remove it either from the CLI or from the Lightning Fabric object."
555555 )
556556 if env_value is None :
557557 return current
0 commit comments