Commit df7bdf6
remove cache layer (#1019)
* remove cache layer
* Update src/titiler/xarray/README.md
  Co-authored-by: Aimee Barciauskas <[email protected]>
* add tile example

Co-authored-by: Aimee Barciauskas <[email protected]>
1 parent: f50b400

File tree: 11 files changed (+355 / -133 lines)

docs/mkdocs.yml

Lines changed: 5 additions & 0 deletions

@@ -109,6 +109,11 @@ nav:
           - errors: api/titiler/mosaic/errors.md
           - models:
               - responses: api/titiler/mosaic/models/responses.md
+      - titiler.xarray:
+          - io: api/titiler/xarray/io.md
+          - dependencies: api/titiler/xarray/dependencies.md
+          - extensions: api/titiler/xarray/extensions.md
+          - factory: api/titiler/xarray/factory.md
 
   - Deployment:
       - Amazon Web Services:

docs/src/advanced/endpoints_factories.md

Lines changed: 125 additions & 59 deletions
Large diffs are not rendered by default.
docs/src/api/titiler/xarray/dependencies.md

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+::: titiler.xarray.dependencies

docs/src/api/titiler/xarray/extensions.md

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+::: titiler.xarray.extensions

docs/src/api/titiler/xarray/factory.md

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+::: titiler.xarray.factory

docs/src/api/titiler/xarray/io.md

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+::: titiler.xarray.io

src/titiler/xarray/README.md

Lines changed: 51 additions & 9 deletions

@@ -8,29 +8,43 @@ Adds support for Xarray Dataset (NetCDF/Zarr) in Titiler.
 $ python -m pip install -U pip
 
 # From Pypi
-$ python -m pip install titiler.xarray
+$ python -m pip install "titiler.xarray[all]"
 
 # Or from sources
 $ git clone https://github.com/developmentseed/titiler.git
-$ cd titiler && python -m pip install -e src/titiler/core -e src/titiler/xarray
+$ cd titiler && python -m pip install -e src/titiler/core -e "src/titiler/xarray[all]"
 ```
 
 ## How To
 
 ```python
 from fastapi import FastAPI
+
+from titiler.xarray.extensions import VariablesExtension
 from titiler.xarray.factory import TilerFactory
 
-# Create a FastAPI application
 app = FastAPI(
-    description="A lightweight Cloud Optimized GeoTIFF tile server",
-)
+    openapi_url="/api",
+    docs_url="/api.html",
+    description="""Xarray based tiles server for MultiDimensional dataset (Zarr/NetCDF).
+
+---
+
+**Documentation**: <a href="https://developmentseed.org/titiler/" target="_blank">https://developmentseed.org/titiler/</a>
 
-# Create a set of MosaicJSON endpoints
-endpoint = TilerFactory()
+**Source Code**: <a href="https://github.com/developmentseed/titiler" target="_blank">https://github.com/developmentseed/titiler</a>
 
-# Register the Mosaic endpoints to the application
-app.include_router(endpoint.router)
+---
+""",
+)
+
+md = TilerFactory(
+    router_prefix="/md",
+    extensions=[
+        VariablesExtension(),
+    ],
+)
+app.include_router(md.router, prefix="/md", tags=["Multi Dimensional"])
 ```
 
 ## Package structure
@@ -41,6 +55,34 @@ titiler/
 ├── tests/                   - Tests suite
 └── titiler/xarray/          - `xarray` namespace package
     ├── dependencies.py      - titiler-xarray dependencies
+    ├── extentions.py        - titiler-xarray extensions
     ├── io.py                - titiler-xarray Readers
     └── factory.py           - endpoints factory
 ```
+
+## Custom Dataset Opener
+
+A default Dataset IO is provided within the `titiler.xarray.Reader` class but will require optional dependencies (fsspec, zarr, h5netcdf, ...) to be installed with `python -m pip install "titiler.xarray[all]"`.
+Dependencies are optional so the entire package size can be optimized to only include dependencies required by a given application.
+
+```python
+from titiler.xarray.io import Reader
+
+import xarray
+import h5netcdf  # noqa
+
+with Reader(
+    "tests/fixtures/dataset_2d.nc",
+    "dataset",
+    opener=xarray.open_dataset,
+) as src:
+    print(src.ds)
+
+>>> <xarray.Dataset> Size: 16MB
+Dimensions:  (x: 2000, y: 1000)
+Coordinates:
+  * x        (x) float64 16kB -170.0 -169.8 -169.7 -169.5 ... 169.5 169.7 169.8
+  * y        (y) float64 8kB -80.0 -79.84 -79.68 -79.52 ... 79.52 79.68 79.84
+Data variables:
+    dataset  (y, x) float64 16MB ...
+```
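Because this commit removes the built-in cache layer, applications that still want to reuse opened datasets can do so on their side by passing a custom `opener` to `Reader`. The sketch below is a minimal, hypothetical example that mirrors the pickle-based approach of the removed code; `cached_opener` and the module-level `_CACHE` dict are illustrative names only, and whether a given dataset pickles cleanly depends on the xarray backend in use.

```python
import pickle
from typing import Any, Dict, Optional

import xarray

from titiler.xarray.io import Reader, xarray_open_dataset

# Hypothetical in-process cache, mirroring the removed cache layer:
# datasets are pickled on first open and un-pickled on later hits.
_CACHE: Dict[str, bytes] = {}


def cached_opener(
    src_path: str,
    group: Optional[Any] = None,
    decode_times: bool = True,
) -> xarray.Dataset:
    """Open a dataset, caching the pickled result per (path, group)."""
    key = f"{src_path}_{group}" if group is not None else src_path
    if key in _CACHE:
        return pickle.loads(_CACHE[key])

    ds = xarray_open_dataset(src_path, group=group, decode_times=decode_times)
    _CACHE[key] = pickle.dumps(ds)
    return ds


with Reader("tests/fixtures/dataset_2d.nc", "dataset", opener=cached_opener) as src:
    print(src.ds)
```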

src/titiler/xarray/notebooks/xarray_dataset_cache.ipynb

Lines changed: 168 additions & 0 deletions
Large diffs are not rendered by default.

src/titiler/xarray/pyproject.toml

Lines changed: 1 addition & 0 deletions

@@ -30,6 +30,7 @@ classifiers = [
 dynamic = ["version"]
 dependencies = [
     "titiler.core==0.19.0.dev",
+    "rio-tiler>=7.2,<8.0",
     "xarray",
     "rioxarray",
 ]

src/titiler/xarray/titiler/xarray/io.py

Lines changed: 1 addition & 29 deletions

@@ -1,7 +1,6 @@
 """titiler.xarray.io"""
 
-import pickle
-from typing import Any, Callable, Dict, List, Optional, Protocol
+from typing import Any, Callable, Dict, List, Optional
 from urllib.parse import urlparse
 
 import attr
@@ -12,36 +11,16 @@
 from rio_tiler.io.xarray import XarrayReader
 
 
-class CacheClient(Protocol):
-    """CacheClient Protocol."""
-
-    def get(self, key: str) -> bytes:
-        """Get key."""
-        ...
-
-    def set(self, key: str, body: bytes) -> None:
-        """Set key."""
-        ...
-
-
 def xarray_open_dataset(  # noqa: C901
     src_path: str,
     group: Optional[Any] = None,
     decode_times: Optional[bool] = True,
-    cache_client: Optional[CacheClient] = None,
 ) -> xarray.Dataset:
     """Open dataset."""
     import aiohttp  # noqa
     import fsspec  # noqa
     import s3fs  # noqa
 
-    # Generate cache key and attempt to fetch the dataset from cache
-    if cache_client:
-        cache_key = f"{src_path}_{group}" if group is not None else src_path
-        data_bytes = cache_client.get(cache_key)
-        if data_bytes:
-            return pickle.loads(data_bytes)
-
     parsed = urlparse(src_path)
     protocol = parsed.scheme or "file"
     if protocol not in ["s3", "https", "http", "file"]:
@@ -102,11 +81,6 @@ def xarray_open_dataset(  # noqa: C901
 
     ds = xarray.open_zarr(file_handler, **xr_open_args)
 
-    if cache_client:
-        # Serialize the dataset to bytes using pickle
-        data_bytes = pickle.dumps(ds)
-        cache_client.set(cache_key, data_bytes)
-
     return ds
 
 
@@ -219,7 +193,6 @@ class Reader(XarrayReader):
 
     group: Optional[Any] = attr.ib(default=None)
     decode_times: bool = attr.ib(default=False)
-    cache_client: Optional[CacheClient] = attr.ib(default=None)
 
     # xarray.DataArray options
     datetime: Optional[str] = attr.ib(default=None)
@@ -238,7 +211,6 @@ def __attrs_post_init__(self):
             self.src_path,
             group=self.group,
             decode_times=self.decode_times,
-            cache_client=self.cache_client,
         )
 
         self.input = get_variable(
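For downstream code the migration is mostly mechanical: drop the removed `cache_client` keyword. A small sketch of the simplified call, assuming the optional I/O extras from `titiler.xarray[all]` are installed, that the default opener handles this local NetCDF fixture, and reusing the `dataset_2d.nc` path from the README:

```python
from titiler.xarray.io import xarray_open_dataset

# Before this commit the opener accepted an extra keyword:
#   xarray_open_dataset(src_path, group=..., decode_times=..., cache_client=...)
# After this commit only the path plus optional group/decode_times remain:
ds = xarray_open_dataset(
    "tests/fixtures/dataset_2d.nc",
    group=None,
    decode_times=True,
)
print(list(ds.data_vars))
```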
