 from xarray.core.utils import (
     Default,
     Frozen,
+    FrozenMappingWarningOnValuesAccess,
     HybridMappingProxy,
     OrderedSet,
     _default,
@@ -778,14 +779,15 @@ def dims(self) -> Frozen[Hashable, int]:
 
         Note that type of this object differs from `DataArray.dims`.
         See `Dataset.sizes` and `DataArray.sizes` for consistently named
-        properties.
+        properties. This property will be changed to return a type more consistent with
+        `DataArray.dims` in the future, i.e. a set of dimension names.
 
         See Also
         --------
         Dataset.sizes
         DataArray.dims
         """
-        return Frozen(self._dims)
+        return FrozenMappingWarningOnValuesAccess(self._dims)
 
     @property
     def sizes(self) -> Frozen[Hashable, int]:
@@ -800,7 +802,7 @@ def sizes(self) -> Frozen[Hashable, int]:
         --------
         DataArray.sizes
         """
-        return self.dims
+        return Frozen(self._dims)
 
     @property
     def dtypes(self) -> Frozen[Hashable, np.dtype]:
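
For context, the user-facing effect of the two property changes above: `Dataset.dims` keeps returning a mapping for now, but it is wrapped in `FrozenMappingWarningOnValuesAccess` so that value access warns about the upcoming change, while `Dataset.sizes` now returns the plain frozen mapping directly. A minimal sketch of the intended migration, assuming the wrapper emits a `FutureWarning` when values are accessed:

    import xarray as xr

    ds = xr.Dataset({"temp": (("x", "y"), [[1, 2, 3], [4, 5, 6]])})

    n = ds.dims["x"]     # still works, but expected to warn that dims will become a set of names
    n = ds.sizes["x"]    # preferred: mapping from dimension name to length, no warning
    dims = set(ds.dims)  # iteration and membership tests stay valid either way

The rest of the diff applies the same migration internally, replacing mapping-style uses of self.dims with self.sizes.
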
@@ -1411,7 +1413,7 @@ def _copy_listed(self, names: Iterable[Hashable]) -> Self:
                 variables[name] = self._variables[name]
             except KeyError:
                 ref_name, var_name, var = _get_virtual_variable(
-                    self._variables, name, self.dims
+                    self._variables, name, self.sizes
                 )
                 variables[var_name] = var
                 if ref_name in self._coord_names or ref_name in self.dims:
@@ -1426,7 +1428,7 @@ def _copy_listed(self, names: Iterable[Hashable]) -> Self:
         for v in variables.values():
             needed_dims.update(v.dims)
 
-        dims = {k: self.dims[k] for k in needed_dims}
+        dims = {k: self.sizes[k] for k in needed_dims}
 
         # preserves ordering of coordinates
         for k in self._variables:
@@ -1448,7 +1450,7 @@ def _construct_dataarray(self, name: Hashable) -> DataArray:
         try:
             variable = self._variables[name]
         except KeyError:
-            _, name, variable = _get_virtual_variable(self._variables, name, self.dims)
+            _, name, variable = _get_virtual_variable(self._variables, name, self.sizes)
 
         needed_dims = set(variable.dims)
 
@@ -1475,7 +1477,7 @@ def _item_sources(self) -> Iterable[Mapping[Hashable, Any]]:
         yield HybridMappingProxy(keys=self._coord_names, mapping=self.coords)
 
         # virtual coordinates
-        yield HybridMappingProxy(keys=self.dims, mapping=self)
+        yield HybridMappingProxy(keys=self.sizes, mapping=self)
 
     def __contains__(self, key: object) -> bool:
         """The 'in' operator will return true or false depending on whether
@@ -2569,7 +2571,7 @@ def info(self, buf: IO | None = None) -> None:
         lines = []
         lines.append("xarray.Dataset {")
         lines.append("dimensions:")
-        for name, size in self.dims.items():
+        for name, size in self.sizes.items():
             lines.append(f"\t{name} = {size} ;")
         lines.append("\nvariables:")
         for name, da in self.variables.items():
@@ -2697,10 +2699,10 @@ def chunk(
         else:
             chunks_mapping = either_dict_or_kwargs(chunks, chunks_kwargs, "chunk")
 
-        bad_dims = chunks_mapping.keys() - self.dims.keys()
+        bad_dims = chunks_mapping.keys() - self.sizes.keys()
         if bad_dims:
             raise ValueError(
-                f"chunks keys {tuple(bad_dims)} not found in data dimensions {tuple(self.dims)}"
+                f"chunks keys {tuple(bad_dims)} not found in data dimensions {tuple(self.sizes.keys())}"
             )
 
         chunkmanager = guess_chunkmanager(chunked_array_type)
@@ -3952,7 +3954,7 @@ def maybe_variable(obj, k):
             try:
                 return obj._variables[k]
             except KeyError:
-                return as_variable((k, range(obj.dims[k])))
+                return as_variable((k, range(obj.sizes[k])))
 
         def _validate_interp_indexer(x, new_x):
             # In the case of datetimes, the restrictions placed on indexers
@@ -4176,7 +4178,7 @@ def _rename_vars(
         return variables, coord_names
 
     def _rename_dims(self, name_dict: Mapping[Any, Hashable]) -> dict[Hashable, int]:
-        return {name_dict.get(k, k): v for k, v in self.dims.items()}
+        return {name_dict.get(k, k): v for k, v in self.sizes.items()}
 
     def _rename_indexes(
         self, name_dict: Mapping[Any, Hashable], dims_dict: Mapping[Any, Hashable]
@@ -5168,7 +5170,7 @@ def _get_stack_index(
             if dim in self._variables:
                 var = self._variables[dim]
             else:
-                _, _, var = _get_virtual_variable(self._variables, dim, self.dims)
+                _, _, var = _get_virtual_variable(self._variables, dim, self.sizes)
             # dummy index (only `stack_coords` will be used to construct the multi-index)
             stack_index = PandasIndex([0], dim)
             stack_coords = {dim: var}
@@ -5195,7 +5197,7 @@ def _stack_once(
             if any(d in var.dims for d in dims):
                 add_dims = [d for d in dims if d not in var.dims]
                 vdims = list(var.dims) + add_dims
-                shape = [self.dims[d] for d in vdims]
+                shape = [self.sizes[d] for d in vdims]
                 exp_var = var.set_dims(vdims, shape)
                 stacked_var = exp_var.stack(**{new_dim: dims})
                 new_variables[name] = stacked_var
@@ -6351,15 +6353,15 @@ def dropna(
         if subset is None:
             subset = iter(self.data_vars)
 
-        count = np.zeros(self.dims[dim], dtype=np.int64)
+        count = np.zeros(self.sizes[dim], dtype=np.int64)
         size = np.int_(0)  # for type checking
 
         for k in subset:
             array = self._variables[k]
             if dim in array.dims:
                 dims = [d for d in array.dims if d != dim]
                 count += np.asarray(array.count(dims))
-                size += math.prod([self.dims[d] for d in dims])
+                size += math.prod([self.sizes[d] for d in dims])
 
         if thresh is not None:
             mask = count >= thresh
@@ -7136,7 +7138,7 @@ def _normalize_dim_order(
71367138 f"Dataset: { list (self .dims )} "
71377139 )
71387140
7139- ordered_dims = {k : self .dims [k ] for k in dim_order }
7141+ ordered_dims = {k : self .sizes [k ] for k in dim_order }
71407142
71417143 return ordered_dims
71427144
@@ -7396,7 +7398,7 @@ def to_dask_dataframe(
                 var = self.variables[name]
             except KeyError:
                 # dimension without a matching coordinate
-                size = self.dims[name]
+                size = self.sizes[name]
                 data = da.arange(size, chunks=size, dtype=np.int64)
                 var = Variable((name,), data)
 
@@ -7469,7 +7471,7 @@ def to_dict(
         d: dict = {
             "coords": {},
             "attrs": decode_numpy_dict_values(self.attrs),
-            "dims": dict(self.dims),
+            "dims": dict(self.sizes),
             "data_vars": {},
         }
         for k in self.coords: