From 251eb428dbefda157a37e3d219fb3288ff65fc90 Mon Sep 17 00:00:00 2001
From: Yunchu Lee
Date: Fri, 22 Dec 2023 16:45:13 +0900
Subject: [PATCH 1/2] Mergeback 1.4.4 to 1.5.0 (#2745)

* Update MAPI version (#2730)

* Update anomaly ov inference task

* Update reqs in exportable code

* Fix one more place of conversion of anomaly map

* Update dependency for exportable code (#2732)

* Fix unsupported dtype in ov graph constant converter

---------

Co-authored-by: Vladislav Sovrasov
---
 .gitignore                                         |  1 +
 requirements/openvino.txt                          |  2 +-
 src/otx/algorithms/anomaly/tasks/openvino.py       | 11 +++++++----
 .../usecases/exportable_code/demo/requirements.txt |  2 +-
 .../prediction_to_annotation_converter.py          |  2 +-
 src/otx/core/ov/ops/infrastructures.py             |  2 ++
 src/otx/core/ov/ops/type_conversions.py            |  1 +
 tests/unit/core/ov/graph/test_ov_graph_utils.py    |  2 ++
 8 files changed, 16 insertions(+), 7 deletions(-)

diff --git a/.gitignore b/.gitignore
index 934218f75c1..bc5e647b1a4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,6 +18,7 @@
 results/
 build/
 dist/
 !src/otx/recipes/**
+src/otx/recipes/**/__pycache__/
 *egg-info
 *.pth
diff --git a/requirements/openvino.txt b/requirements/openvino.txt
index e91ed69252a..16acdba61be 100644
--- a/requirements/openvino.txt
+++ b/requirements/openvino.txt
@@ -2,7 +2,7 @@
 # OpenVINO Requirements. #
 nncf==2.6.0
 onnx==1.13.0
-openvino-model-api==0.1.6
+openvino-model-api==0.1.8
 openvino==2023.0
 openvino-dev==2023.0
 openvino-telemetry==2023.2.*
diff --git a/src/otx/algorithms/anomaly/tasks/openvino.py b/src/otx/algorithms/anomaly/tasks/openvino.py
index a8dfa580e15..d96516a5752 100644
--- a/src/otx/algorithms/anomaly/tasks/openvino.py
+++ b/src/otx/algorithms/anomaly/tasks/openvino.py
@@ -188,13 +188,17 @@ def infer(self, dataset: DatasetEntity, inference_parameters: InferenceParameter
                 label = self.anomalous_label if image_result.pred_score >= 0.5 else self.normal_label
             elif self.task_type == TaskType.ANOMALY_SEGMENTATION:
                 annotations = create_annotation_from_segmentation_map(
-                    pred_mask, image_result.anomaly_map.squeeze(), {0: self.normal_label, 1: self.anomalous_label}
+                    pred_mask,
+                    image_result.anomaly_map.squeeze() / 255.0,
+                    {0: self.normal_label, 1: self.anomalous_label},
                 )
                 dataset_item.append_annotations(annotations)
                 label = self.normal_label if len(annotations) == 0 else self.anomalous_label
             elif self.task_type == TaskType.ANOMALY_DETECTION:
                 annotations = create_detection_annotation_from_anomaly_heatmap(
-                    pred_mask, image_result.anomaly_map.squeeze(), {0: self.normal_label, 1: self.anomalous_label}
+                    pred_mask,
+                    image_result.anomaly_map.squeeze() / 255.0,
+                    {0: self.normal_label, 1: self.anomalous_label},
                 )
                 dataset_item.append_annotations(annotations)
                 label = self.normal_label if len(annotations) == 0 else self.anomalous_label
@@ -202,13 +206,12 @@ def infer(self, dataset: DatasetEntity, inference_parameters: InferenceParameter
                 raise ValueError(f"Unknown task type: {self.task_type}")

             dataset_item.append_labels([ScoredLabel(label=label, probability=float(probability))])
-            anomaly_map = (image_result.anomaly_map * 255).astype(np.uint8)
             heatmap_media = ResultMediaEntity(
                 name="Anomaly Map",
                 type="anomaly_map",
                 label=label,
                 annotation_scene=dataset_item.annotation_scene,
-                numpy=anomaly_map,
+                numpy=image_result.anomaly_map,
             )
             dataset_item.append_metadata_item(heatmap_media)
             update_progress_callback(int((idx + 1) / len(dataset) * 100))
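The openvino.py hunks above assume that the updated model API hands back image_result.anomaly_map as a uint8 heatmap in the 0-255 range: the annotation helpers now get the map rescaled by / 255.0, while ResultMediaEntity takes it unchanged. A minimal sketch of that contract, with a synthetic array standing in for the real model output:

import numpy as np

# Synthetic stand-in for the uint8 anomaly map assumed to come from the model wrapper.
anomaly_map_u8 = np.random.randint(0, 256, size=(1, 256, 256), dtype=np.uint8)

# Annotation helpers work on normalized float scores in [0.0, 1.0], hence the `/ 255.0`.
normalized_map = anomaly_map_u8.squeeze() / 255.0
assert 0.0 <= normalized_map.min() <= normalized_map.max() <= 1.0

# The heatmap media path can consume the uint8 map directly, which is why the old
# `(anomaly_map * 255).astype(np.uint8)` conversion is dropped in the hunk above.
heatmap_for_display = anomaly_map_u8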
diff --git a/src/otx/api/usecases/exportable_code/demo/requirements.txt b/src/otx/api/usecases/exportable_code/demo/requirements.txt
index 79abb91c63c..c1a5a57a318 100644
--- a/src/otx/api/usecases/exportable_code/demo/requirements.txt
+++ b/src/otx/api/usecases/exportable_code/demo/requirements.txt
@@ -1,4 +1,4 @@
 openvino==2023.0
-openvino-model-api==0.1.6
+openvino-model-api==0.1.8
 otx==1.5.0
 numpy>=1.21.0,<=1.23.5 # np.bool was removed in 1.24.0 which was used in openvino runtime
diff --git a/src/otx/api/usecases/exportable_code/prediction_to_annotation_converter.py b/src/otx/api/usecases/exportable_code/prediction_to_annotation_converter.py
index 40d1f4beec2..36667057a92 100644
--- a/src/otx/api/usecases/exportable_code/prediction_to_annotation_converter.py
+++ b/src/otx/api/usecases/exportable_code/prediction_to_annotation_converter.py
@@ -380,7 +380,7 @@ def convert_to_annotation(self, predictions: AnomalyResult, metadata: Dict[str,
         assert predictions.pred_mask is not None
         assert predictions.anomaly_map is not None
         annotations = create_annotation_from_segmentation_map(
-            predictions.pred_mask, predictions.anomaly_map, self.label_map
+            predictions.pred_mask, predictions.anomaly_map / 255.0, self.label_map
         )
         if len(annotations) == 0:
             # TODO: add confidence to this label
diff --git a/src/otx/core/ov/ops/infrastructures.py b/src/otx/core/ov/ops/infrastructures.py
index 2572ac7af01..a8fd0a475e7 100644
--- a/src/otx/core/ov/ops/infrastructures.py
+++ b/src/otx/core/ov/ops/infrastructures.py
@@ -233,6 +233,8 @@ def from_ov(cls, ov_op):
                 if not np.array_equal(data, data_):
                     logger.warning(f"Overflow detected in {op_name}")
                 data = torch.from_numpy(data_)
+            elif data.dtype == np.uint16:
+                data = torch.from_numpy(data.astype(np.int32))
             else:
                 data = torch.from_numpy(data)

diff --git a/src/otx/core/ov/ops/type_conversions.py b/src/otx/core/ov/ops/type_conversions.py
index 25454053c22..267ae7ea37d 100644
--- a/src/otx/core/ov/ops/type_conversions.py
+++ b/src/otx/core/ov/ops/type_conversions.py
@@ -25,6 +25,7 @@
     "u1": torch.uint8,  # no type in torch
     "u4": torch.uint8,  # no type in torch
     "u8": torch.uint8,
+    "u16": torch.int32,  # no type in torch
     "u32": torch.int32,  # no type in torch
     "u64": torch.int64,  # no type in torch
     "i4": torch.int8,  # no type in torch
diff --git a/tests/unit/core/ov/graph/test_ov_graph_utils.py b/tests/unit/core/ov/graph/test_ov_graph_utils.py
index 7133f523da4..9e3a865dfc4 100644
--- a/tests/unit/core/ov/graph/test_ov_graph_utils.py
+++ b/tests/unit/core/ov/graph/test_ov_graph_utils.py
@@ -2,6 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0
 #

+import pytest
 from otx.core.ov.graph.graph import Graph
 from otx.core.ov.graph.utils import (
     get_constant_input_nodes,
@@ -38,6 +39,7 @@ def test_handle_merging_into_batchnorm():


 @e2e_pytest_unit
+@pytest.mark.skip(reason="Updated models are not compatible with the paired batchnorm converter")
 def test_handle_paired_batchnorm():
     graph = get_graph()
     handle_paired_batchnorm(graph)
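The u16 handling added in infrastructures.py and type_conversions.py above comes down to one fact: torch has no uint16 tensor type, so OpenVINO constants stored as uint16 have to be widened before torch.from_numpy. A small, self-contained illustration (the values are made up):

import numpy as np
import torch

# A made-up graph constant that OpenVINO stores as uint16 (e.g. shape or index data).
data = np.array([0, 1, 65535], dtype=np.uint16)

# torch.from_numpy(data) raises TypeError for uint16, so the constant is widened
# to int32 first; int32 holds the full uint16 range (0..65535) without loss.
tensor = torch.from_numpy(data.astype(np.int32))
assert tensor.dtype == torch.int32 and tensor[-1].item() == 65535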
From c3541baff4e122ec2278dbb7dc762a74ae480491 Mon Sep 17 00:00:00 2001
From: Yunchu Lee
Date: Tue, 23 Jan 2024 13:35:01 +0900
Subject: [PATCH 2/2] Mergeback 1.4.0 to 1.5.0 (#2826)

* Update MAPI version (#2730)

* Update dependency for exportable code (#2732)

* Filter invalid polygon shapes (#2795)

---------

Co-authored-by: Vladislav Sovrasov
Co-authored-by: Eugene Liu
---
 src/otx/core/data/adapter/base_dataset_adapter.py  | 13 +++++++++----
 .../core/data/adapter/detection_dataset_adapter.py |  2 +-
 .../adapter/visual_prompting_dataset_adapter.py    |  2 +-
 .../mmseg/datasets/pipelines/test_transforms.py    |  4 ++--
 4 files changed, 13 insertions(+), 8 deletions(-)

diff --git a/src/otx/core/data/adapter/base_dataset_adapter.py b/src/otx/core/data/adapter/base_dataset_adapter.py
index 51b62a0cede..c73ffbbf9aa 100644
--- a/src/otx/core/data/adapter/base_dataset_adapter.py
+++ b/src/otx/core/data/adapter/base_dataset_adapter.py
@@ -272,11 +272,16 @@ def _prepare_label_information(

         return {"category_items": category_items, "label_groups": label_groups, "label_entities": label_entities}

-    def _is_normal_polygon(self, annotation: DatumAnnotationType.polygon) -> bool:
+    def _is_normal_polygon(self, annotation: DatumAnnotationType.polygon, width: int, height: int) -> bool:
         """To filter out the abnormal polygon."""
-        x_points = [annotation.points[i] for i in range(0, len(annotation.points), 2)]
-        y_points = [annotation.points[i + 1] for i in range(0, len(annotation.points), 2)]
-        return min(x_points) < max(x_points) and min(y_points) < max(y_points)
+        x_points = annotation.points[::2]  # Extract x-coordinates
+        y_points = annotation.points[1::2]  # Extract y-coordinates
+
+        return (
+            min(x_points) < max(x_points) < width
+            and min(y_points) < max(y_points) < height
+            and annotation.get_area() > 0
+        )

     def _is_normal_bbox(self, x1: float, y1: float, x2: float, y2: float) -> bool:
         """To filter out the abrnormal bbox."""
diff --git a/src/otx/core/data/adapter/detection_dataset_adapter.py b/src/otx/core/data/adapter/detection_dataset_adapter.py
index a6ce1b2bce5..dbcc8ca16ea 100644
--- a/src/otx/core/data/adapter/detection_dataset_adapter.py
+++ b/src/otx/core/data/adapter/detection_dataset_adapter.py
@@ -41,7 +41,7 @@ def get_otx_dataset(self) -> DatasetEntity:
                             self.task_type in (TaskType.INSTANCE_SEGMENTATION, TaskType.ROTATED_DETECTION)
                             and ann.type == DatumAnnotationType.polygon
                         ):
-                            if self._is_normal_polygon(ann):
+                            if self._is_normal_polygon(ann, image.width, image.height):
                                 shapes.append(self._get_polygon_entity(ann, image.width, image.height))
                         if self.task_type is TaskType.DETECTION and ann.type == DatumAnnotationType.bbox:
                             if self._is_normal_bbox(ann.points[0], ann.points[1], ann.points[2], ann.points[3]):
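The filtering introduced above drops degenerate or out-of-bounds polygons before they become shape entities. A standalone sketch of the same check, with the Datumaro annotation replaced by plain arguments (a flat [x0, y0, x1, y1, ...] point list and a precomputed area, both hypothetical):

# Points are stored as a flat [x0, y0, x1, y1, ...] list, as in Datumaro polygons.
def is_normal_polygon(points, area, width, height):
    x_points = points[::2]   # x-coordinates
    y_points = points[1::2]  # y-coordinates
    return (
        min(x_points) < max(x_points) < width
        and min(y_points) < max(y_points) < height
        and area > 0
    )

# A valid triangle inside a 100x100 image passes the check ...
assert is_normal_polygon([10, 10, 50, 10, 30, 40], area=600.0, width=100, height=100)
# ... while a zero-width (vertical line) polygon is filtered out.
assert not is_normal_polygon([10, 10, 10, 40, 10, 80], area=0.0, width=100, height=100)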
diff --git a/src/otx/core/data/adapter/visual_prompting_dataset_adapter.py b/src/otx/core/data/adapter/visual_prompting_dataset_adapter.py
index d428dc6afad..7a5c235f792 100644
--- a/src/otx/core/data/adapter/visual_prompting_dataset_adapter.py
+++ b/src/otx/core/data/adapter/visual_prompting_dataset_adapter.py
@@ -53,7 +53,7 @@ def get_otx_dataset(self) -> DatasetEntity:
                     for ann in datumaro_item.annotations:
                         if ann.type == DatumAnnotationType.polygon:
                             # save polygons as-is, they will be converted to masks.
-                            if self._is_normal_polygon(ann):
+                            if self._is_normal_polygon(ann, image.width, image.height):
                                 shapes.append(self._get_polygon_entity(ann, image.width, image.height))

                         if ann.type == DatumAnnotationType.mask:
diff --git a/tests/unit/algorithms/segmentation/adapters/mmseg/datasets/pipelines/test_transforms.py b/tests/unit/algorithms/segmentation/adapters/mmseg/datasets/pipelines/test_transforms.py
index facded59996..dee7c6be234 100644
--- a/tests/unit/algorithms/segmentation/adapters/mmseg/datasets/pipelines/test_transforms.py
+++ b/tests/unit/algorithms/segmentation/adapters/mmseg/datasets/pipelines/test_transforms.py
@@ -110,8 +110,8 @@ class TestNormalize:
     @pytest.mark.parametrize(
         "mean,std,to_rgb,expected",
        [
-            (1.0, 1.0, True, np.array([[[1.0, 0.0, 0.0]]], dtype=np.float32)),
-            (1.0, 1.0, False, np.array([[[-1.0, 0.0, 0.0]]], dtype=np.float32)),
+            ([[[1.0, 1.0, 1.0]]], [[[1.0, 1.0, 1.0]]], True, np.array([[[1.0, 0.0, -1.0]]], dtype=np.float32)),
+            ([[[1.0, 1.0, 1.0]]], [[[1.0, 1.0, 1.0]]], False, np.array([[[-1.0, 0.0, 1.0]]], dtype=np.float32)),
         ],
     )
     def test_call(self, mean: float, std: float, to_rgb: bool, expected: np.array) -> None:
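For the TestNormalize change above, mean and std are now passed per channel, and the expected arrays reflect the BGR-to-RGB flip that to_rgb performs before (img - mean) / std is applied. A rough sketch of that arithmetic, assuming a single BGR pixel [0, 1, 2] as the test input (the real mmseg Normalize transform also records img_norm_cfg in the results dict):

import numpy as np

def normalize(img, mean, std, to_rgb):
    # Simplified stand-in for the mmseg Normalize transform exercised by the test.
    img = img.astype(np.float32)
    if to_rgb:
        img = img[..., ::-1]  # BGR -> RGB channel flip
    return (img - np.asarray(mean, dtype=np.float32)) / np.asarray(std, dtype=np.float32)

pixel = np.array([[[0.0, 1.0, 2.0]]], dtype=np.float32)  # assumed single BGR pixel
mean = [[[1.0, 1.0, 1.0]]]
std = [[[1.0, 1.0, 1.0]]]

# Matches the expectations in the parametrized cases above.
assert np.allclose(normalize(pixel, mean, std, to_rgb=True), [[[1.0, 0.0, -1.0]]])
assert np.allclose(normalize(pixel, mean, std, to_rgb=False), [[[-1.0, 0.0, 1.0]]])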