diff --git a/nbodykit/algorithms/convpower/fkp.py b/nbodykit/algorithms/convpower/fkp.py
index 1b642a984..4746d2613 100644
--- a/nbodykit/algorithms/convpower/fkp.py
+++ b/nbodykit/algorithms/convpower/fkp.py
@@ -121,6 +121,9 @@ class ConvolvedFFTPower(object):
         dk : float, optional
             the spacing in wavenumber to use; if not provided; the fundamental mode
             of the box is used
+        logkBins : bool, optional
+            if True, bin in log10(k); ``dk`` is then the log10 bin width, and both
+            ``dk`` and a ``kmin`` greater than zero must be given; default is False
 
         References
         ----------
@@ -138,7 +141,8 @@ def __init__(self, first, poles,
                  kmax=None,
                  dk=None,
                  use_fkp_weights=None,
-                 P0_FKP=None):
+                 P0_FKP=None,
+                 logkBins=False):
 
         if use_fkp_weights is not None or P0_FKP is not None:
             raise ValueError("use_fkp_weights and P0_FKP are deprecated. Assign a FKPWeight column to source['randoms']['FKPWeight'] and source['data']['FKPWeight'] with the help of the FKPWeightFromNbar(nbar) function")
@@ -193,6 +197,7 @@ def __init__(self, first, poles,
         self.attrs['dk'] = dk
         self.attrs['kmin'] = kmin
         self.attrs['kmax'] = kmax
+        self.attrs['logkBins'] = logkBins
 
         # store BoxSize and BoxCenter from source
         self.attrs['Nmesh'] = self.first.attrs['Nmesh'].copy()
@@ -252,17 +257,28 @@ def run(self):
         """
         pm = self.first.pm
 
+        if self.attrs["logkBins"] and (self.attrs['dk'] is None or self.attrs['kmin'] <= 0.0):
+            msg = "Use of logkBins requires dk (the log10 spacing) to be specified explicitly and kmin > 0.0"
+            raise ValueError(msg)
+
         # setup the binning in k out to the minimum nyquist frequency
         dk = 2*numpy.pi/pm.BoxSize.min() if self.attrs['dk'] is None else self.attrs['dk']
 
         kmin = self.attrs['kmin']
         kmax = self.attrs['kmax']
         if kmax is None:
-            kmax = numpy.pi*pm.Nmesh.min()/pm.BoxSize.max() + dk/2
+            if self.attrs["logkBins"]:
+                kmax = 10.0 ** (numpy.log10(numpy.pi*pm.Nmesh.min()/pm.BoxSize.max()) + dk/2)
+            else:
+                kmax = numpy.pi*pm.Nmesh.min()/pm.BoxSize.max() + dk/2
 
         if dk > 0:
-            kedges = numpy.arange(kmin, kmax, dk)
-            kcoords = None
+            if not self.attrs["logkBins"]:
+                kedges = numpy.arange(kmin, kmax, dk)
+                kcoords = None
+            else:
+                kedges = 10.**numpy.arange(numpy.log10(kmin), numpy.log10(kmax), dk)
+                kcoords = None
         else:
             k = pm.create_coords('complex')
             kedges, kcoords = _find_unique_edges(k, 2 * numpy.pi / pm.BoxSize, kmax, pm.comm)
diff --git a/nbodykit/binned_statistic.py b/nbodykit/binned_statistic.py
index b8cce2e50..59c2e65a9 100644
--- a/nbodykit/binned_statistic.py
+++ b/nbodykit/binned_statistic.py
@@ -142,6 +142,11 @@ class BinnedStatistic(object):
     """
     def __init__(self, dims, edges, data, fields_to_sum=[], coords=None, **kwargs):
 
+        # save and track metadata first: the coords computation below needs attrs
+        self.attrs = {}
+        for k in kwargs:
+            self.attrs[k] = kwargs[k]
+
         # number of dimensions must match
         if len(dims) != len(edges):
             raise ValueError("size mismatch between specified `dims` and `edges`")
@@ -163,6 +168,8 @@ def __init__(self, dims, edges, data, fields_to_sum=[], coords=None, **kwargs):
         for i, dim in enumerate(self.dims):
             if coords is not None and coords[i] is not None:
                 self.coords[dim] = numpy.copy(coords[i])
+            elif self.attrs.get("logkBins", False) and dim == 'k':
+                self.coords[dim] = 10. ** (0.5 * (numpy.log10(edges[i][1:]) + numpy.log10(edges[i][:-1])))
             else:
                 self.coords[dim] = 0.5 * (edges[i][1:] + edges[i][:-1])
 
@@ -178,9 +185,6 @@ def __init__(self, dims, edges, data, fields_to_sum=[], coords=None, **kwargs):
         # fields that we wish to sum, instead of averaging
         self._fields_to_sum = fields_to_sum
 
-        # save and track metadata
-        self.attrs = {}
-        for k in kwargs: self.attrs[k] = kwargs[k]
 
     @classmethod
     def from_state(kls, state):
@@ -276,7 +280,10 @@ def __slice_edges__(self, indices):
             else:
                 idx = [0]
             edges[dim] = self.edges[dim][idx]
-            coords[dim] = 0.5 * (edges[dim][1:] + edges[dim][:-1])
+            if self.attrs.get("logkBins", False) and dim == "k":
+                coords[dim] = 10. ** (0.5 * (numpy.log10(edges[dim][1:]) + numpy.log10(edges[dim][:-1])))
+            else:
+                coords[dim] = 0.5 * (edges[dim][1:] + edges[dim][:-1])
 
         return edges, coords
 
@@ -815,7 +822,10 @@ def average(self, dim, **kwargs):
             A new BinnedStatistic, with data averaged along one dimension,
             which reduces the number of dimension by one
         """
-        spacing = (self.edges[dim][-1] - self.edges[dim][0])
+        if self.attrs.get("logkBins", False) and dim == "k":
+            spacing = (numpy.log10(self.edges[dim][-1]) - numpy.log10(self.edges[dim][0]))
+        else:
+            spacing = (self.edges[dim][-1] - self.edges[dim][0])
         toret = self.reindex(dim, spacing, **kwargs)
         return toret.sel(**{dim:toret.coords[dim][0]})
 
@@ -876,7 +886,10 @@ def reindex(self,
             fields_to_sum += self._fields_to_sum
 
         # determine the new binning
-        old_spacings = numpy.diff(self.coords[dim])
-        if not numpy.array_equal(old_spacings, old_spacings):
+        if self.attrs.get("logkBins", False) and dim == "k":
+            old_spacings = numpy.diff(numpy.log10(self.coords[dim]))
+        else:
+            old_spacings = numpy.diff(self.coords[dim])
+        if not numpy.allclose(old_spacings, old_spacings[0]):
             raise ValueError("`reindex` requires even bin spacings")
         old_spacing = old_spacings[0]
@@ -917,7 +930,10 @@ def reindex(self,
         # new edges
         new_shape[i] /= factor
         new_shape[i] = int(new_shape[i])
-        new_edges = numpy.linspace(edges[0], edges[-1], new_shape[i]+1)
+        if self.attrs.get("logkBins", False) and dim == "k":
+            new_edges = 10. ** numpy.linspace(numpy.log10(edges[0]), numpy.log10(edges[-1]), new_shape[i]+1)
+        else:
+            new_edges = numpy.linspace(edges[0], edges[-1], new_shape[i]+1)
 
         # the re-binned data
         new_data = numpy.empty(new_shape, dtype=self.data.dtype)
@@ -937,7 +953,10 @@ def reindex(self,
         # construct new object
         kw = self.__copy_attrs__()
         kw['edges'][dim] = new_edges
-        kw['coords'][dim] = 0.5*(new_edges[1:] + new_edges[:-1])
+        if self.attrs.get("logkBins", False) and dim == 'k':
+            kw['coords'][dim] = 10. ** (0.5*(numpy.log10(new_edges[1:]) + numpy.log10(new_edges[:-1])))
+        else:
+            kw['coords'][dim] = 0.5*(new_edges[1:] + new_edges[:-1])
 
         toret = self.__construct_direct__(new_data, new_mask, **kw)
         return (toret, spacing) if return_spacing else toret