Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Conversion validation #184

Merged
merged 8 commits into from
Oct 18, 2024
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 35 additions & 25 deletions nibe/console_scripts/convert_csv.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,17 @@
return d


def _convert_series_to_dict(obj):
if isinstance(obj, pandas.Series):
return obj.to_dict()
elif isinstance(obj, dict):
return {k: _convert_series_to_dict(v) for k, v in obj.items()}
elif isinstance(obj, list):
return [_convert_series_to_dict(item) for item in obj]

Check warning on line 40 in nibe/console_scripts/convert_csv.py

View check run for this annotation

Codecov / codecov/patch

nibe/console_scripts/convert_csv.py#L40

Added line #L40 was not covered by tests
else:
return obj


class ValidationFailed(Exception):
    """Raised when a generated file does not match its expected content."""

Expand Down Expand Up @@ -68,11 +79,9 @@

self._ensure_no_duplicate_ids()

self._export_to_file()

if mode == "export":
self._export_to_file()
elif mode == "validate":
elif mode == "verify":
self._verify_export()

def _make_mapping_parameter(self):
Expand All @@ -92,15 +101,15 @@
mappings = mappings.reset_index("match", drop=True)
self.data["mappings"] = pandas.Series(
{
str(k): self._make_mapping_series(g)
k: self._make_mapping_series(g)
for k, g in mappings.groupby("value", level=0)
}
).where(self._is_mapping_allowed)

def _is_mapping_allowed(self, s):
return self.data["factor"] == 1

def _make_mapping_series(self, g):
def _make_mapping_series(self, g: pandas.DataFrame):
return g.set_index("value", drop=True)["key"].drop_duplicates()

def _unset_equal_min_max_default_values(self):
Expand Down Expand Up @@ -248,12 +257,6 @@

self.data = self.data.set_index("id")

def _convert_series_to_dict(self, o):
if isinstance(o, pandas.Series):
return o.sort_index(key=lambda i: i.astype(int)).to_dict()

raise TypeError(f"Object of type {type(o)} is not JSON serializable")

def _ensure_no_duplicate_ids(self):
if self.data.index.has_duplicates:
logger.error(
Expand All @@ -262,14 +265,17 @@
raise ValueError("Duplicate IDs found")

def _make_dict(self):
    """Return ``{row id: row as dict}`` with NaN columns dropped.

    Nested :class:`pandas.Series` values (e.g. mappings) are converted to
    plain dicts so the result is JSON serializable.
    """
    result = {}
    for index, row in self.data.iterrows():
        result[index] = _convert_series_to_dict(row.dropna().to_dict())
    return result

def _export_to_file(self):
    """Write the converted parameters, merged with extensions, to ``self.out_file`` as JSON."""
    exported = self._make_dict()
    update_dict(exported, self.extensions, True)

    with open(self.out_file, "w", encoding="utf-8") as fh:
        json.dump(exported, fh, indent=2)
        # Trailing newline keeps the file POSIX-friendly and diff-stable.
        fh.write("\n")

def _verify_export(self):
Expand All @@ -285,21 +291,20 @@
raise ValidationFailed(f"File {self.out_file} not found")

if o != file_contents:
expected = json.dumps(o, indent=4, sort_keys=True)
actual = json.dumps(file_contents, indent=4, sort_keys=True)
expected = json.dumps(o, indent=2, sort_keys=True)
actual = json.dumps(file_contents, indent=2, sort_keys=True)

Check warning on line 295 in nibe/console_scripts/convert_csv.py

View check run for this annotation

Codecov / codecov/patch

nibe/console_scripts/convert_csv.py#L294-L295

Added lines #L294 - L295 were not covered by tests
diff = difflib.unified_diff(
expected.splitlines(),
actual.splitlines(),
fromfile="expected",
tofile="actual",
lineterm="",
)
diff_text = "\n".join(diff)
raise ValidationFailed(
f"File {self.out_file} does not match the expected content\nDiff:\n{diff_text}"
)
raise ValidationFailed(f"File {self.out_file} does not match:\n{diff_text}")

Check warning on line 304 in nibe/console_scripts/convert_csv.py

View check run for this annotation

Codecov / codecov/patch

nibe/console_scripts/convert_csv.py#L304

Added line #L304 was not covered by tests


async def _validate_initialization(out_file):
async def _verify_heat_pump_initialization(out_file):
model = Model.CUSTOM
model.data_file = out_file
hp = HeatPump(model)
Expand All @@ -311,7 +316,7 @@
all_extensions = json.load(fp)

processed_files = []
convert_failed = []
processing_failed = []

for in_file in files("nibe.data").glob("*.csv"):
out_file = in_file.with_suffix(".json")
Expand All @@ -326,23 +331,28 @@
try:
CSVConverter(in_file, out_file, extensions).convert(mode=mode)

await _validate_initialization(out_file)
await _verify_heat_pump_initialization(out_file)

if mode == "verify":
logger.info(f"Verified {out_file}")
else:
logger.info(f"Converted {in_file} to {out_file}")
except ValidationFailed as ex:
processing_failed.append(in_file)
logger.error("Validation failed for %s: %s", in_file, ex)

Check warning on line 342 in nibe/console_scripts/convert_csv.py

View check run for this annotation

Codecov / codecov/patch

nibe/console_scripts/convert_csv.py#L340-L342

Added lines #L340 - L342 were not covered by tests
except Exception as ex:
convert_failed.append(in_file)
processing_failed.append(in_file)

Check warning on line 344 in nibe/console_scripts/convert_csv.py

View check run for this annotation

Codecov / codecov/patch

nibe/console_scripts/convert_csv.py#L344

Added line #L344 was not covered by tests
logger.exception("Failed to process %s: %s", in_file, ex)
finally:
processed_files.append(in_file)

if convert_failed:
logger.error("Failed to process the following files: %s", convert_failed)
if processing_failed:
logger.error("Failed to process the following files: %s", processing_failed)

Check warning on line 350 in nibe/console_scripts/convert_csv.py

View check run for this annotation

Codecov / codecov/patch

nibe/console_scripts/convert_csv.py#L350

Added line #L350 was not covered by tests
raise ValueError("Failed to process all files")

logger.info("Processed files: %s", list(map(lambda x: x.name, processed_files)))
logger.info(
"Successfully processed files: %s", list(map(lambda x: x.name, processed_files))
)


def main():
Expand Down