
Commit fe0ae88
fix(DRAFT): Treat polars as exception, invalidate cache
dangotbanned committed Nov 10, 2024
1 parent ebc1bfa commit fe0ae88
Showing 2 changed files with 9 additions and 5 deletions.
altair/datasets/_readers.py (13 changes: 8 additions & 5 deletions)
@@ -99,8 +99,8 @@ def scan_fn(self, source: StrPath, /) -> Callable[..., IntoFrameT]:
         return self._scan_fn[suffix]
 
     def _response_hook(self, f):
-        # HACK: pyarrow wants the file obj
-        return f.read()
+        # HACK: `pyarrow` + `pandas` wants the file obj
+        return f
 
     def dataset(
         self,
@@ -273,6 +273,9 @@ def __init__(self, name: _Polars, /) -> None:
         }
         self._scan_fn = {".parquet": pl.scan_parquet}
 
+    def _response_hook(self, f):
+        return f.read()
+
 
 class _PolarsPyArrowReader(_Reader["pl.DataFrame", "pl.LazyFrame"]):
     def __init__(self, name: Literal["polars[pyarrow]"], /) -> None:
@@ -289,6 +292,9 @@ def __init__(self, name: Literal["polars[pyarrow]"], /) -> None:
         }
         self._scan_fn = {".parquet": pl.scan_parquet}
 
+    def _response_hook(self, f):
+        return f.read()
+
 
 class _PyArrowReader(_Reader["pa.Table", "pa.Table"]):
     """
@@ -333,9 +339,6 @@ def __init__(self, name: _PyArrow, /) -> None:
         }
         self._scan_fn = {".parquet": pa_read_parquet}
 
-    def _response_hook(self, f):
-        return f
-
 
 def _filter_reduce(predicates: tuple[Any, ...], constraints: Metadata, /) -> nw.Expr:
     """
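
For context, a minimal runnable sketch of the pattern this file now follows (class names here are hypothetical stand-ins, not the actual altair implementation): the base reader's `_response_hook` hands `pandas`/`pyarrow` the file-like response object unchanged, and the polars readers override it to eagerly read the raw bytes, making polars the exception the commit title refers to.

from io import BytesIO

class _BaseReader:
    def _response_hook(self, f):
        # HACK: `pyarrow` + `pandas` wants the file obj
        return f

class _PolarsLikeReader(_BaseReader):
    def _response_hook(self, f):
        # polars is the exception: hand it the raw bytes
        return f.read()

# Stand-in for an HTTP response body:
resp = BytesIO(b"symbol,price\nAAPL,1.0\n")
assert _BaseReader()._response_hook(resp) is resp                 # file obj passed through
assert _PolarsLikeReader()._response_hook(BytesIO(b"x")) == b"x"  # bytes read eagerly

One upside of this design choice is that the exceptional behavior lives on the polars subclasses themselves, so the shared fetch path can call `self._response_hook(...)` without branching on backend.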
tests/test_datasets.py (1 change: 1 addition & 0 deletions)
@@ -39,6 +39,7 @@ def test_loader_url(backend: _Backend) -> None:
 @backends
 def test_loader_call(backend: _Backend) -> None:
     data = Loader.with_backend(backend)
+    data.cache_dir = ""
     frame = data("stocks", ".csv")
     assert is_into_dataframe(frame)
     nw_frame = nw.from_native(frame)
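
The added line covers the "invalidate cache" half of the commit title: assigning an empty `cache_dir` before the call makes the test exercise a fresh fetch of "stocks" rather than a previously cached file. A hedged sketch of why a falsy value would have that effect (the `_Loader` class and `_cached` helper below are hypothetical; the real `Loader` may implement caching differently):

from pathlib import Path

class _Loader:
    cache_dir: str = ""

    def _cached(self, name: str) -> Path | None:
        # An empty string is falsy, so the cache lookup is skipped
        # and the dataset is downloaded again.
        if not self.cache_dir:
            return None
        return Path(self.cache_dir) / name

assert _Loader()._cached("stocks.csv") is None  # cache disabled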
