
Replace checks for string "MemoryDataSet" in tests #1407

Merged: 11 commits, Jun 21, 2023
2 changes: 1 addition & 1 deletion package/kedro_viz/data_access/repositories/catalog.py
@@ -98,7 +98,7 @@ def get_dataset(self, dataset_name: str) -> Optional[AbstractDataSet]:
else: # pragma: no cover
dataset_obj = self._catalog._get_dataset(dataset_name)
except DataSetNotFoundError:
dataset_obj = MemoryDataSet()
dataset_obj = MemoryDataSet() # type: ignore[assignment]

return dataset_obj

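For context, the type: ignore[assignment] comment added above annotates a fallback in which any dataset the catalog cannot resolve is represented by an in-memory placeholder. A minimal sketch of that pattern, assuming Kedro 0.18-era names (DataSetNotFoundError, MemoryDataSet) and written as a standalone function rather than the repository method:

from kedro.io import DataCatalog, DataSetNotFoundError, MemoryDataSet


def resolve_dataset(catalog: DataCatalog, dataset_name: str):
    """Sketch only: mirrors the fallback annotated in catalog.py."""
    try:
        # DataCatalog._get_dataset is the private lookup used above.
        return catalog._get_dataset(dataset_name)
    except DataSetNotFoundError:
        # Unknown datasets become an in-memory placeholder; the real line
        # in catalog.py carries type: ignore[assignment] to silence mypy
        # on this assignment.
        return MemoryDataSet()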
5 changes: 3 additions & 2 deletions package/test_requirements.txt
@@ -1,5 +1,5 @@
-r requirements.txt
kedro >=0.17.0
kedro >=0.17.0
kedro-datasets[pandas.ParquetDataSet, pandas.CSVDataSet, pandas.ExcelDataSet, plotly.JSONDataSet]~=1.0
kedro-telemetry>=0.1.1 # for testing telemetry integration
bandit~=1.7
@@ -11,14 +11,15 @@ fastapi[all]>=0.73.0, <0.96.0
isort~=5.11
matplotlib~=3.5
mypy~=0.990
moto~=1.3.0
moto~=1.3.0
psutil==5.9.4 # same as Kedro for now
pylint~=2.17
pytest~=7.3
pytest-asyncio~=0.21
pytest-mock~=3.10
pytest-cov~=4.0
sqlalchemy-stubs~=0.4
strawberry-graphql<0.184.1 # https://github.com/strawberry-graphql/strawberry/pull/2569
trufflehog~=2.2

# mypy
2 changes: 1 addition & 1 deletion package/tests/test_api/example_pipelines.json
@@ -73,7 +73,7 @@
"type": "data",
"modular_pipelines": ["uk", "uk.data_science"],
"layer": null,
"dataset_type": "io.memory_dataset.MemoryDataSet"
"dataset_type": "io.memory_dataset.MemoryDataset"
},
{
"id": "uk.data_processing",
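The fixture change above only flips the class-name casing inside the dataset_type string, whose shape is the dataset's module path minus the top-level kedro package, plus the class name. A hypothetical helper, shown for illustration only and not kedro-viz's actual implementation, that produces strings of this shape:

def dataset_type_string(dataset: object) -> str:
    # Illustration only: drop the top-level package from the dataset's
    # module path and append the class name. A Kedro MemoryDataset
    # instance (module kedro.io.memory_dataset) yields
    # "io.memory_dataset.MemoryDataset", matching the fixture value.
    module = type(dataset).__module__.split(".", 1)[-1]
    return f"{module}.{type(dataset).__name__}"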
10 changes: 5 additions & 5 deletions package/tests/test_api/test_rest/test_reponses.py
@@ -166,7 +166,7 @@ def assert_example_data(response_data):
"modular_pipelines": ["uk", "uk.data_science"],
"type": "data",
"layer": None,
"dataset_type": "io.memory_dataset.MemoryDataSet",
"dataset_type": "io.memory_dataset.MemoryDataset",
},
{
"id": "uk.data_processing",
@@ -366,7 +366,7 @@ def assert_example_data_from_file(response_data):
"modular_pipelines": ["uk", "uk.data_science"],
"type": "data",
"layer": None,
"dataset_type": "io.memory_dataset.MemoryDataSet",
"dataset_type": "io.memory_dataset.MemoryDataset",
},
{
"id": "uk.data_processing",
@@ -500,7 +500,7 @@ def assert_example_transcoded_data(response_data):
"type": "data",
"modular_pipelines": [],
"layer": None,
"dataset_type": "io.memory_dataset.MemoryDataSet",
"dataset_type": "io.memory_dataset.MemoryDataset",
},
{
"id": "f0ebef01",
@@ -554,7 +554,7 @@ def assert_example_transcoded_data(response_data):
"type": "data",
"modular_pipelines": [],
"layer": None,
"dataset_type": "io.memory_dataset.MemoryDataSet",
"dataset_type": "io.memory_dataset.MemoryDataset",
},
]

@@ -735,7 +735,7 @@ def test_get_pipeline(self, client):
"modular_pipelines": ["uk", "uk.data_science"],
"type": "data",
"layer": None,
"dataset_type": "io.memory_dataset.MemoryDataSet",
"dataset_type": "io.memory_dataset.MemoryDataset",
},
{
"id": "uk",
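The assertions above pin the full response payload, so the rename touches every expected dataset_type value. A hedged, illustrative guard (not a helper from this suite) expressing the same intent more loosely:

def assert_no_legacy_spelling(response_data: dict) -> None:
    # Illustration only: after the rename, no node in the API payload
    # should report the legacy "MemoryDataSet" spelling.
    for node in response_data["nodes"]:
        assert "MemoryDataSet" not in (node.get("dataset_type") or "")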
16 changes: 8 additions & 8 deletions package/tests/test_models/test_flowchart.py
@@ -6,7 +6,7 @@
from unittest.mock import MagicMock, call, patch

import pytest
from kedro.io import MemoryDataSet, PartitionedDataSet
from kedro.io import MemoryDataset, PartitionedDataSet
from kedro.pipeline.node import node
from kedro_datasets.pandas import CSVDataSet, ParquetDataSet

@@ -173,7 +173,7 @@ def test_create_transcoded_data_node(
assert data_node.pipelines == set()

def test_create_parameters_all_parameters(self):
parameters_dataset = MemoryDataSet(
parameters_dataset = MemoryDataset(
data={"test_split_ratio": 0.3, "num_epochs": 1000}
)
parameters_node = GraphNode.create_parameters_node(
@@ -203,7 +203,7 @@ def test_create_parameters_all_parameters(self):
def test_create_parameters_node_single_parameter(
self, dataset_name, expected_modular_pipelines
):
parameters_dataset = MemoryDataSet(data=0.3)
parameters_dataset = MemoryDataset(data=0.3)
parameters_node = GraphNode.create_parameters_node(
full_name=dataset_name, layer=None, tags={}, parameters=parameters_dataset
)
@@ -228,8 +228,8 @@ def test_create_non_existing_parameter_node(self, patched_warning):

@patch("logging.Logger.warning")
def test_create_non_existing_parameter_node_empty_dataset(self, patched_warning):
"""Test the case where ``parameters`` is equal to a MemoryDataSet with no data"""
parameters_dataset = MemoryDataSet()
"""Test the case where ``parameters`` is equal to a MemoryDataset with no data"""
parameters_dataset = MemoryDataset()
parameters_node = GraphNode.create_parameters_node(
full_name="non_existing",
layer=None,
@@ -269,7 +269,7 @@ def test_add_node_to_pipeline(self):

class TestGraphNodeMetadata:
@pytest.mark.parametrize(
"dataset,has_metadata", [(MemoryDataSet(data=1), True), (None, False)]
"dataset,has_metadata", [(MemoryDataset(data=1), True), (None, False)]
)
def test_node_has_metadata(self, dataset, has_metadata):
data_node = GraphNode.create_data_node(
@@ -573,15 +573,15 @@ def test_data_node_metadata_latest_tracking_data_not_exist(self):

def test_parameters_metadata_all_parameters(self):
parameters = {"test_split_ratio": 0.3, "num_epochs": 1000}
parameters_dataset = MemoryDataSet(data=parameters)
parameters_dataset = MemoryDataset(data=parameters)
parameters_node = GraphNode.create_parameters_node(
full_name="parameters", layer=None, tags={}, parameters=parameters_dataset
)
parameters_node_metadata = ParametersNodeMetadata(parameters_node)
assert parameters_node_metadata.parameters == parameters

def test_parameters_metadata_single_parameter(self):
parameters_dataset = MemoryDataSet(data=0.3)
parameters_dataset = MemoryDataset(data=0.3)
parameters_node = GraphNode.create_parameters_node(
full_name="params:test_split_ratio",
layer=None,
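The import swap at the top of test_flowchart.py assumes a Kedro release that already exposes MemoryDataset from kedro.io. A version-tolerant variant, a sketch rather than what this PR does, could fall back to the legacy name:

try:
    from kedro.io import MemoryDataset  # newer Kedro spelling
except ImportError:
    # Older Kedro releases only ship the legacy CamelCase class name.
    from kedro.io import MemoryDataSet as MemoryDataset

With the class imported under a single name, the tests can keep constructing MemoryDataset instances directly instead of comparing against the string "MemoryDataSet", which is the check the PR title sets out to replace.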