feat(datasets): make datasets arguments keywords only #358

Merged
46 commits merged on Nov 27, 2023
Changes from 43 commits

Commits (46)
55ea01c
feat(datasets): make `APIDataset.__init__` keyword only
felixscherz Sep 26, 2023
64ef19d
feat(datasets): make `BioSequenceDataset.__init__` keyword only
felixscherz Sep 26, 2023
6cc07d6
feat(datasets): make `ParquetDataset.__init__` keyword only
felixscherz Sep 26, 2023
b2dcb3e
feat(datasets): make `EmailMessageDataset.__init__` keyword only
felixscherz Sep 26, 2023
252afe4
feat(datasets): make `GeoJSONDataset.__init__` keyword only
felixscherz Sep 26, 2023
20e9f37
feat(datasets): make `HoloviewsWriter.__init__` keyword only
felixscherz Sep 26, 2023
8d6e21d
feat(datasets): make `JSONDataset.__init__` keyword only
felixscherz Sep 26, 2023
60e95fd
feat(datasets): make `MatplotlibWriter.__init__` keyword only
felixscherz Sep 26, 2023
2aeeb5b
feat(datasets): make `GMLDataset.__init__` keyword only
felixscherz Sep 26, 2023
df23134
feat(datasets): make `GraphMLDataset.__init__` keyword only
felixscherz Sep 26, 2023
5ae5f76
feat(datasets): make NetworkX `JSONDataset.__init__` keyword only
felixscherz Sep 26, 2023
0ce8ec1
feat(datasets): make `PickleDataset.__init__` keyword only
felixscherz Sep 26, 2023
e17ed76
feat(datasets): make `ImageDataset.__init__` keyword only
felixscherz Sep 26, 2023
a43cf63
feat(datasets): make plotly `JSONDataset.__init__` keyword only
felixscherz Sep 26, 2023
b61b412
feat(datasets): make `PlotlyDataset.__init__` keyword only
felixscherz Sep 26, 2023
58c419d
feat(datasets): make polars `CSVDataset.__init__` keyword only
felixscherz Sep 26, 2023
1fe877d
feat(datasets): make polars `GenericDataset.__init__` keyword only
felixscherz Sep 26, 2023
cef8b48
feat(datasets): make redis `PickleDataset.__init__` keyword only
felixscherz Sep 26, 2023
15410f0
feat(datasets): make `SnowparkTableDataset.__init__` keyword only
felixscherz Sep 26, 2023
7ee42dd
feat(datasets): make `SVMLightDataset.__init__` keyword only
felixscherz Sep 26, 2023
182ea26
feat(datasets): make `TensorFlowModelDataset.__init__` keyword only
felixscherz Sep 26, 2023
2f965a6
feat(datasets): make `TextDataset.__init__` keyword only
felixscherz Sep 26, 2023
2baa750
feat(datasets): make `YAMLDataset.__init__` keyword only
felixscherz Sep 26, 2023
4b2314a
feat(datasets): make `ManagedTableDataset.__init__` keyword only
felixscherz Sep 28, 2023
bdde0b1
feat(datasets): make `VideoDataset.__init__` keyword only
felixscherz Sep 28, 2023
37f780a
feat(datasets): make `CSVDataset.__init__` keyword only
felixscherz Sep 28, 2023
bd512c2
feat(datasets): make `DeltaTableDataset.__init__` keyword only
felixscherz Sep 28, 2023
a5c6ac3
feat(datasets): make `ExcelDataset.__init__` keyword only
felixscherz Sep 28, 2023
a506fd7
feat(datasets): make `FeatherDataset.__init__` keyword only
felixscherz Sep 28, 2023
7f68518
feat(datasets): make `GBQTableDataset.__init__` keyword only
felixscherz Sep 28, 2023
5d57b61
feat(datasets): make `GenericDataset.__init__` keyword only
felixscherz Sep 28, 2023
dc6abc3
feat(datasets): make pandas `JSONDataset.__init__` keyword only
felixscherz Sep 28, 2023
ff1cced
feat(datasets): make pandas `ParquetDataset.__init__` keyword only
felixscherz Sep 28, 2023
cb74ae9
feat(datasets): make `SQLTableDataset.__init__` keyword only
felixscherz Sep 28, 2023
5f1f95b
feat(datasets): make `XMLDataset.__init__` keyword only
felixscherz Sep 28, 2023
efd7dd0
feat(datasets): make `HDFDataset.__init__` keyword only
felixscherz Sep 28, 2023
eda9ae3
feat(datasets): make `DeltaTableDataset.__init__` keyword only
felixscherz Sep 28, 2023
b664d88
feat(datasets): make `SparkDataset.__init__` keyword only
felixscherz Sep 28, 2023
b51adc6
feat(datasets): make `SparkHiveDataset.__init__` keyword only
felixscherz Sep 28, 2023
963c5c6
feat(datasets): make `SparkJDBCDataset.__init__` keyword only
felixscherz Sep 28, 2023
16ac583
feat(datasets): make `SparkStreamingDataset.__init__` keyword only
felixscherz Sep 28, 2023
db63c0c
feat(datasets): make `IncrementalDataset.__init__` keyword only
felixscherz Oct 13, 2023
962ab1b
feat(datasets): make `LazyPolarsDataset.__init__` keyword only
felixscherz Oct 25, 2023
f8bdaa3
docs(datasets): update doctests for HoloviewsWriter
felixscherz Nov 12, 2023
c9e08e3
Merge branch 'main' into main
merelcht Nov 27, 2023
ea44289
Update release notes
merelcht Nov 27, 2023
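
Every commit in this PR applies the same mechanical change: a bare `*` is added at the top of each dataset's `__init__` signature, which makes all of the constructor arguments keyword-only. As a minimal sketch of the pattern and its effect on callers (using a hypothetical ExampleDataset rather than any real kedro-datasets class):

from typing import Any, Dict, Optional

class ExampleDataset:
    """Hypothetical stand-in for a kedro-datasets class."""

    def __init__(
        self,
        *,  # every parameter after the bare `*` must be passed by keyword
        filepath: str,
        load_args: Optional[Dict[str, Any]] = None,
        save_args: Optional[Dict[str, Any]] = None,
    ) -> None:
        self._filepath = filepath
        self._load_args = load_args or {}
        self._save_args = save_args or {}

ExampleDataset(filepath="data/iris.csv")  # OK: keyword arguments
# ExampleDataset("data/iris.csv")  # TypeError: __init__() takes 1 positional argument but 2 were given

The per-file diffs below show this one-line addition in each dataset, plus the few call sites (a super().__init__ call and some test fixtures) that had to switch to keyword arguments as a result.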
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/api/api_dataset.py
@@ -95,6 +95,7 @@ class APIDataset(AbstractDataset[None, requests.Response]):

def __init__( # noqa: PLR0913
self,
*,
url: str,
method: str = "GET",
load_args: Dict[str, Any] = None,
@@ -49,6 +49,7 @@ class BioSequenceDataset(AbstractDataset[List, List]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/dask/parquet_dataset.py
@@ -91,6 +91,7 @@ class ParquetDataset(AbstractDataset[dd.DataFrame, dd.DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
@@ -199,14 +199,14 @@ class ManagedTableDataset(AbstractVersionedDataset):

def __init__( # noqa: PLR0913
self,
+ *,
table: str,
catalog: str = None,
database: str = "default",
write_mode: Union[str, None] = None,
dataframe_type: str = "spark",
primary_key: Optional[Union[str, List[str]]] = None,
version: Version = None,
- *,
# the following parameters are used by project hooks
# to create or update table properties
schema: Dict[str, Any] = None,
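
ManagedTableDataset is the one dataset that already had a `*` in its signature: it used to sit after `version`, so only the project-hook parameters (`schema` onwards) were keyword-only. This change moves the marker to the top, so every argument, including `table`, must now be passed by keyword. A toy sketch of what moving the marker does (hypothetical functions, not the real class):

def before(table, catalog=None, database="default", *, schema=None):
    ...

def after(*, table, catalog=None, database="default", schema=None):
    ...

before("my_table", None, "default", schema={"id": "int"})  # positional arguments were allowed
after(table="my_table", database="default", schema={"id": "int"})  # now everything is named
# after("my_table")  # TypeError: after() takes 0 positional arguments but 1 was given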
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/email/message_dataset.py
@@ -54,6 +54,7 @@ class EmailMessageDataset(AbstractVersionedDataset[Message, Message]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/geopandas/geojson_dataset.py
@@ -50,6 +50,7 @@ class GeoJSONDataset(

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
@@ -38,6 +38,7 @@ class HoloviewsWriter(AbstractVersionedDataset[HoloViews, NoReturn]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
fs_args: Dict[str, Any] = None,
credentials: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/json/json_dataset.py
@@ -52,6 +52,7 @@ class JSONDataset(AbstractVersionedDataset[Any, Any]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
save_args: Dict[str, Any] = None,
version: Version = None,
@@ -103,6 +103,7 @@ class MatplotlibWriter(

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
fs_args: Dict[str, Any] = None,
credentials: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/networkx/gml_dataset.py
@@ -40,6 +40,7 @@ class GMLDataset(AbstractVersionedDataset[networkx.Graph, networkx.Graph]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/networkx/graphml_dataset.py
@@ -39,6 +39,7 @@ class GraphMLDataset(AbstractVersionedDataset[networkx.Graph, networkx.Graph]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/networkx/json_dataset.py
@@ -40,6 +40,7 @@ class JSONDataset(AbstractVersionedDataset[networkx.Graph, networkx.Graph]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pandas/csv_dataset.py
@@ -72,6 +72,7 @@ class CSVDataset(AbstractVersionedDataset[pd.DataFrame, pd.DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pandas/deltatable_dataset.py
@@ -88,6 +88,7 @@ class DeltaTableDataset(AbstractDataset):

def __init__( # noqa: PLR0913
self,
*,
filepath: Optional[str] = None,
catalog_type: Optional[DataCatalog] = None,
catalog_name: Optional[str] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pandas/excel_dataset.py
@@ -112,6 +112,7 @@ class ExcelDataset(

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
engine: str = "openpyxl",
load_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pandas/feather_dataset.py
@@ -73,6 +73,7 @@ class FeatherDataset(AbstractVersionedDataset[pd.DataFrame, pd.DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pandas/gbq_dataset.py
@@ -66,6 +66,7 @@ class GBQTableDataset(AbstractDataset[None, pd.DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
dataset: str,
table_name: str,
project: str = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pandas/generic_dataset.py
@@ -84,6 +84,7 @@ class GenericDataset(AbstractVersionedDataset[pd.DataFrame, pd.DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
file_format: str,
load_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pandas/hdf_dataset.py
@@ -59,6 +59,7 @@ class HDFDataset(AbstractVersionedDataset[pd.DataFrame, pd.DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
key: str,
load_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pandas/json_dataset.py
@@ -67,6 +67,7 @@ class JSONDataset(AbstractVersionedDataset[pd.DataFrame, pd.DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pandas/parquet_dataset.py
@@ -78,6 +78,7 @@ class ParquetDataset(AbstractVersionedDataset[pd.DataFrame, pd.DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pandas/sql_dataset.py
@@ -155,6 +155,7 @@ class SQLTableDataset(AbstractDataset[pd.DataFrame, pd.DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
table_name: str,
credentials: dict[str, Any],
load_args: dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pandas/xml_dataset.py
@@ -50,6 +50,7 @@ class XMLDataset(AbstractVersionedDataset[pd.DataFrame, pd.DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
@@ -67,6 +67,7 @@ class IncrementalDataset(PartitionedDataset):

def __init__( # noqa: PLR0913
self,
*,
path: str,
dataset: str | type[AbstractDataset] | dict[str, Any],
checkpoint: str | dict[str, Any] | None = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pickle/pickle_dataset.py
@@ -73,6 +73,7 @@ class PickleDataset(AbstractVersionedDataset[Any, Any]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
backend: str = "pickle",
load_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/pillow/image_dataset.py
@@ -36,6 +36,7 @@ class ImageDataset(AbstractVersionedDataset[Image.Image, Image.Image]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
save_args: Dict[str, Any] = None,
version: Version = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/plotly/json_dataset.py
@@ -54,6 +54,7 @@ class JSONDataset(

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
10 changes: 9 additions & 1 deletion kedro-datasets/kedro_datasets/plotly/plotly_dataset.py
@@ -70,6 +70,7 @@ class PlotlyDataset(JSONDataset):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
plotly_args: Dict[str, Any],
load_args: Dict[str, Any] = None,
@@ -115,7 +116,14 @@ def __init__( # noqa: PLR0913
metadata: Any arbitrary metadata.
This is ignored by Kedro, but may be consumed by users or external plugins.
"""
- super().__init__(filepath, load_args, save_args, version, credentials, fs_args)
+ super().__init__(
+     filepath=filepath,
+     load_args=load_args,
+     save_args=save_args,
+     version=version,
+     credentials=credentials,
+     fs_args=fs_args,
+ )
self._plotly_args = plotly_args

_fs_args = deepcopy(fs_args) or {}
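
Because the parent plotly `JSONDataset.__init__` is now keyword-only, `PlotlyDataset` can no longer forward its arguments positionally, so the `super().__init__(...)` call above is rewritten to pass each argument by name. A minimal sketch of why the positional form breaks (toy classes, not the real ones):

class Parent:
    def __init__(self, *, filepath, load_args=None):
        self.filepath = filepath
        self.load_args = load_args or {}

class Child(Parent):
    def __init__(self, *, filepath, plotly_args):
        # super().__init__(filepath) would raise:
        # TypeError: __init__() takes 1 positional argument but 2 were given
        super().__init__(filepath=filepath)
        self.plotly_args = plotly_args

Child(filepath="plot.json", plotly_args={"type": "scatter"})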
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/polars/csv_dataset.py
@@ -70,6 +70,7 @@ class CSVDataset(AbstractVersionedDataset[pl.DataFrame, pl.DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
@@ -56,6 +56,7 @@ class EagerPolarsDataset(AbstractVersionedDataset[pl.DataFrame, pl.DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
file_format: str,
load_args: Dict[str, Any] = None,
@@ -75,6 +75,7 @@ class LazyPolarsDataset(AbstractVersionedDataset[pl.LazyFrame, PolarsFrame]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
file_format: str,
load_args: Optional[Dict[str, Any]] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/redis/redis_dataset.py
@@ -63,6 +63,7 @@ class PickleDataset(AbstractDataset[Any, Any]):

def __init__( # noqa: PLR0913
self,
*,
key: str,
backend: str = "pickle",
load_args: Dict[str, Any] = None,
@@ -105,6 +105,7 @@ class SnowparkTableDataset(AbstractDataset):

def __init__( # noqa: PLR0913
self,
*,
table_name: str,
schema: str = None,
database: str = None,
2 changes: 1 addition & 1 deletion kedro-datasets/kedro_datasets/spark/deltatable_dataset.py
@@ -67,7 +67,7 @@ class DeltaTableDataset(AbstractDataset[None, DeltaTable]):
# using ``ThreadRunner`` instead
_SINGLE_PROCESS = True

- def __init__(self, filepath: str, metadata: Dict[str, Any] = None) -> None:
+ def __init__(self, *, filepath: str, metadata: Dict[str, Any] = None) -> None:
"""Creates a new instance of ``DeltaTableDataset``.

Args:
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/spark/spark_dataset.py
@@ -264,6 +264,7 @@ class SparkDataset(AbstractVersionedDataset[DataFrame, DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
file_format: str = "parquet",
load_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/spark/spark_hive_dataset.py
@@ -72,6 +72,7 @@ class SparkHiveDataset(AbstractDataset[DataFrame, DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
database: str,
table: str,
write_mode: str = "errorifexists",
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/spark/spark_jdbc_dataset.py
@@ -72,6 +72,7 @@ class SparkJDBCDataset(AbstractDataset[DataFrame, DataFrame]):

def __init__( # noqa: PLR0913
self,
*,
url: str,
table: str,
credentials: Dict[str, Any] = None,
@@ -44,6 +44,7 @@ class SparkStreamingDataset(AbstractDataset):

def __init__(
self,
*,
filepath: str = "",
file_format: str = "",
save_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/svmlight/svmlight_dataset.py
@@ -90,6 +90,7 @@ class SVMLightDataset(AbstractVersionedDataset[_DI, _DO]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
@@ -64,6 +64,7 @@ class TensorFlowModelDataset(AbstractVersionedDataset[tf.keras.Model, tf.keras.M

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
load_args: Dict[str, Any] = None,
save_args: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/text/text_dataset.py
@@ -46,6 +46,7 @@ class TextDataset(AbstractVersionedDataset[str, str]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
version: Version = None,
credentials: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/video/video_dataset.py
@@ -270,6 +270,7 @@ class VideoDataset(AbstractDataset[AbstractVideo, AbstractVideo]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
fourcc: Optional[str] = "mp4v",
credentials: Dict[str, Any] = None,
1 change: 1 addition & 0 deletions kedro-datasets/kedro_datasets/yaml/yaml_dataset.py
@@ -49,6 +49,7 @@ class YAMLDataset(AbstractVersionedDataset[Dict, Dict]):

def __init__( # noqa: PLR0913
self,
*,
filepath: str,
save_args: Dict[str, Any] = None,
version: Version = None,
8 changes: 5 additions & 3 deletions kedro-datasets/tests/holoviews/test_holoviews_writer.py
@@ -27,12 +27,14 @@ def dummy_hv_object():

@pytest.fixture
def hv_writer(filepath_png, save_args, fs_args):
- return HoloviewsWriter(filepath_png, save_args=save_args, fs_args=fs_args)
+ return HoloviewsWriter(filepath=filepath_png, save_args=save_args, fs_args=fs_args)


@pytest.fixture
def versioned_hv_writer(filepath_png, load_version, save_version):
- return HoloviewsWriter(filepath_png, version=Version(load_version, save_version))
+ return HoloviewsWriter(
+     filepath=filepath_png, version=Version(load_version, save_version)
+ )


@pytest.mark.skipif(
@@ -63,7 +65,7 @@ def test_save_data(self, tmp_path, dummy_hv_object, hv_writer):
)
def test_open_extra_args(self, tmp_path, fs_args, mocker):
fs_mock = mocker.patch("fsspec.filesystem")
- writer = HoloviewsWriter(str(tmp_path), fs_args)
+ writer = HoloviewsWriter(filepath=str(tmp_path), fs_args=fs_args)

fs_mock.assert_called_once_with("file", auto_mkdir=True, storage_option="value")
assert writer._fs_open_args_save == fs_args["open_args_save"]
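
These fixtures are the call sites in the test suite that still relied on positional arguments, so they switch to `filepath=...` and `fs_args=...`. Code that instantiates datasets directly in Python needs the same adjustment; catalog YAML entries should be unaffected, since Kedro builds datasets from catalog configuration using keyword arguments. A hedged sketch of the kind of change user code may need (the path and load_args here are made up):

from kedro_datasets.pandas import CSVDataset

# Before this PR a positional filepath worked:
#   dataset = CSVDataset("data/01_raw/iris.csv")
# Now the argument must be named:
dataset = CSVDataset(filepath="data/01_raw/iris.csv", load_args={"sep": ","})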