Merge back 1.2.0rc1 #2026

Merged
4 changes: 2 additions & 2 deletions .github/workflows/code_scan.yml
@@ -49,7 +49,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v4
         with:
-          python-version: 3.10
+          python-version: "3.10"
       - name: Install dependencies
         run: python -m pip install tox
       - name: Bandit Scanning
@@ -71,7 +71,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v4
         with:
-          python-version: 3.10
+          python-version: "3.10"
       - name: Install dependencies
         run: python -m pip install -r requirements/dev.txt
       - name: Snyk Scanning
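Note on the quoting fix in both hunks above: unquoted `3.10` is parsed by YAML as a float and collapses to `3.1`, so `actions/setup-python` would try to resolve Python 3.1 rather than 3.10. A quick way to see the YAML behaviour (a minimal PyYAML sketch, not part of this PR):

```python
import yaml  # PyYAML

# Unquoted, the scalar is a float and the trailing zero is lost.
print(yaml.safe_load("python-version: 3.10"))    # {'python-version': 3.1}

# Quoted, it stays a string, so setup-python receives "3.10" as intended.
print(yaml.safe_load('python-version: "3.10"'))  # {'python-version': '3.10'}
```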
@@ -0,0 +1,4 @@
+"""Configs Initialization of OTX Action Classification Tasks."""
+
+# Copyright (C) 2021-2023 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
2 changes: 1 addition & 1 deletion otx/algorithms/common/tasks/nncf_task.py
@@ -162,7 +162,7 @@ def _prepare_optimize(self):
         # last batch size of 1 causes undefined behaviour for batch normalization
         # when initializing and training NNCF
         if self._data_cfg is not None:
-            data_loader = self._recipe_cfg.data.get("train_dataloader", {})
+            data_loader = self._recipe_cfg.data.get("train_dataloader", ConfigDict())
             samples_per_gpu = data_loader.get("samples_per_gpu", self._recipe_cfg.data.get("samples_per_gpu"))
             otx_dataset = get_configs_by_keys(self._data_cfg.data.train, "otx_dataset")
             assert len(otx_dataset) == 1
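A plain `{}` fallback would still support the chained `.get("samples_per_gpu", ...)` on the next line, so the likely motivation for `ConfigDict()` is type consistency: the rest of the recipe config is mmcv-style, where attribute access is expected to work on whatever this call returns. A minimal sketch of the difference (assuming mmcv's `ConfigDict`; this is my reading of the intent, not stated in the PR):

```python
from mmcv.utils import ConfigDict

loader = ConfigDict(samples_per_gpu=4)
print(loader.get("samples_per_gpu"))   # 4 -- dict-style access works
print(loader.samples_per_gpu)          # 4 -- attribute access also works

# With a plain dict fallback, downstream attribute access would break:
# {}.samples_per_gpu  ->  AttributeError
print(ConfigDict().get("samples_per_gpu", 2))  # 2 -- same default behaviour as {}
```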
51 changes: 21 additions & 30 deletions otx/algorithms/detection/adapters/mmdet/datasets/tiling.py
@@ -96,25 +96,6 @@ def __init__(

         self.dataset = dataset
         self.tiles = self.gen_tile_ann()
-        self.cache_tiles()
-
-    @timeit
-    def cache_tiles(self):
-        """Cache tiles to disk."""
-        pbar = tqdm(total=len(self.tiles))
-        pre_img_idx = None
-        for i, tile in enumerate(self.tiles):
-            tile["tile_path"] = osp.join(
-                self.tmp_folder, "_".join([str(i), tile["uuid"], tile["ori_filename"], ".jpg"])
-            )
-            x_1, y_1, x_2, y_2 = tile["tile_box"]
-            dataset_idx = tile["dataset_idx"]
-            if dataset_idx != pre_img_idx:
-                ori_img = self.dataset[dataset_idx]["img"]
-                pre_img_idx = dataset_idx
-
-            mmcv.imwrite(ori_img[y_1:y_2, x_1:x_2, :], tile["tile_path"])
-            pbar.update(1)

     @timeit
     def gen_tile_ann(self) -> List[Dict]:
@@ -125,12 +106,16 @@ def gen_tile_ann(self) -> List[Dict]:
             coordinates relative to the original image.
         """
         tiles = []
-        pbar = tqdm(total=len(self.dataset))
+        cache_result = []
+        for result in tqdm(self.dataset, desc="Loading dataset annotations..."):
+            cache_result.append(result)

-        for idx, result in enumerate(self.dataset):
+        pbar = tqdm(total=len(self.dataset) * 2, desc="Generating tile annotations...")
+        for idx, result in enumerate(cache_result):
             tiles.append(self.gen_single_img(result, dataset_idx=idx))
             pbar.update(1)

-        for idx, result in enumerate(self.dataset):
+        for idx, result in enumerate(cache_result):
             tiles.extend(self.gen_tiles_single_img(result, dataset_idx=idx))
             pbar.update(1)
         return tiles
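The rewrite above replaces two passes over `self.dataset` with one materializing pass plus two passes over the in-memory `cache_result`; since indexing an mmdet-style dataset typically re-runs loading for each access, this avoids paying that cost twice. The shape of the pattern, as a stripped-down sketch (`pass_one`/`pass_two` are hypothetical stand-ins for `gen_single_img`/`gen_tiles_single_img`):

```python
from tqdm import tqdm

def two_pass(dataset, pass_one, pass_two):
    """Materialize expensive items once, then run both passes on the cache."""
    cached = [item for item in tqdm(dataset, desc="Loading dataset annotations...")]

    out = []
    pbar = tqdm(total=len(cached) * 2, desc="Generating tile annotations...")
    for idx, item in enumerate(cached):
        out.append(pass_one(item, idx))   # first pass: one entry per image
        pbar.update(1)
    for idx, item in enumerate(cached):
        out.extend(pass_two(item, idx))   # second pass: many tiles per image
        pbar.update(1)
    return out
```

The trade-off is memory: every dataset item stays resident for the duration of annotation generation.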
@@ -145,7 +130,7 @@ def gen_single_img(self, result: Dict, dataset_idx: int) -> Dict:
         Returns:
             Dict: annotation with some other useful information for data pipeline.
         """
-        result["tile_box"] = (0, 0, result["dataset_item"].width, result["dataset_item"].height)
+        result["tile_box"] = (0, 0, result["img_shape"][1], result["img_shape"][0])
         result["dataset_idx"] = dataset_idx
         result["original_shape_"] = result["img_shape"]
         result["uuid"] = str(uuid.uuid4())
@@ -163,11 +148,11 @@ def gen_tiles_single_img(self, result: Dict, dataset_idx: int) -> List[Dict]:
             List[Dict]: a list of tile annotation with some other useful information for data pipeline.
         """
         tile_list = []
-        gt_bboxes = result.pop("gt_bboxes", np.zeros((0, 4), dtype=np.float32))
-        gt_masks = result.pop("gt_masks", None)
-        gt_bboxes_ignore = result.pop("gt_bboxes_ignore", np.zeros((0, 4), dtype=np.float32))
-        gt_labels = result.pop("gt_labels", np.array([], dtype=np.int64))
-        img_shape = result.pop("img_shape")
+        gt_bboxes = result.get("gt_bboxes", np.zeros((0, 4), dtype=np.float32))
+        gt_masks = result.get("gt_masks", None)
+        gt_bboxes_ignore = result.get("gt_bboxes_ignore", np.zeros((0, 4), dtype=np.float32))
+        gt_labels = result.get("gt_labels", np.array([], dtype=np.int64))
+        img_shape = result.get("img_shape")
         height, width = img_shape[:2]
         _tile = self.prepare_result(result)

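The `pop` → `get` switch here is behavioural, not cosmetic: `pop` removes the key from `result` as a side effect, so any later consumer of the same dict would find the ground-truth fields gone — and with the new `cache_result`, the full-image entry appended by `gen_single_img` appears to hold a reference to this very dict. A minimal illustration:

```python
import numpy as np

result = {"gt_labels": np.array([1, 2]), "img_shape": (480, 640, 3)}

labels = result.pop("gt_labels", np.array([], dtype=np.int64))
print("gt_labels" in result)   # False -- destroyed for every later reader

result = {"gt_labels": np.array([1, 2]), "img_shape": (480, 640, 3)}
labels = result.get("gt_labels", np.array([], dtype=np.int64))
print("gt_labels" in result)   # True -- read without mutation
```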
@@ -380,9 +365,15 @@ def __getitem__(self, idx):
         dataset_idx = result["dataset_idx"]
         x_1, y_1, x_2, y_2 = result["tile_box"]
         ori_img = self.dataset[dataset_idx]["img"]
+        cropped_tile = ori_img[y_1:y_2, x_1:x_2, :]
+        tile_path = osp.join(
+            self.tmp_folder, "_".join([str(dataset_idx), result["uuid"], result["ori_filename"], ".jpg"])
+        )
+        self.tiles[idx]["tile_path"] = tile_path
+        mmcv.imwrite(cropped_tile, tile_path)
         if self.img2fp32:
-            ori_img = ori_img.astype(np.float32)
-        result["img"] = ori_img[y_1:y_2, x_1:x_2, :]
+            cropped_tile = cropped_tile.astype(np.float32)
+        result["img"] = cropped_tile
         return result

     @staticmethod
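With `cache_tiles()` gone from `__init__`, cropping and writing now happen lazily in `__getitem__`: a tile only hits `tmp_folder` when the dataloader actually requests it, so startup no longer pays for tiles that may never be sampled. As written, the crop is re-written on every access; below is a sketch of the same logic as a standalone function, with an existence guard added as my own assumption (not in the PR):

```python
import os.path as osp

import mmcv
import numpy as np

def load_tile(dataset, tiles, tmp_folder, idx, img2fp32=False):
    """Hypothetical standalone version of the lazy caching in __getitem__."""
    result = tiles[idx]
    x_1, y_1, x_2, y_2 = result["tile_box"]
    ori_img = dataset[result["dataset_idx"]]["img"]
    cropped_tile = ori_img[y_1:y_2, x_1:x_2, :]

    tile_path = osp.join(
        tmp_folder, "_".join([str(result["dataset_idx"]), result["uuid"], result["ori_filename"], ".jpg"])
    )
    result["tile_path"] = tile_path
    if not osp.exists(tile_path):  # assumption: skip the disk write on repeat visits
        mmcv.imwrite(cropped_tile, tile_path)

    if img2fp32:
        cropped_tile = cropped_tile.astype(np.float32)
    result["img"] = cropped_tile
    return result
```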
2 changes: 1 addition & 1 deletion otx/algorithms/detection/adapters/openvino/task.py
@@ -84,9 +84,9 @@
     IOptimizationTask,
     OptimizationType,
 )
-from otx.api.utils import Tiler
 from otx.api.utils.dataset_utils import add_saliency_maps_to_dataset_item
 from otx.api.utils.detection_utils import detection2array
+from otx.api.utils.tiler import Tiler

 logger = get_logger()

@@ -15,7 +15,7 @@
 from otx.api.entities.label_schema import LabelSchemaEntity
 from otx.api.entities.model_template import TaskType
 from otx.api.serialization.label_mapper import LabelSchemaMapper
-from otx.api.utils import Tiler
+from otx.api.utils.tiler import Tiler
 from otx.api.utils.detection_utils import detection2array

 from .utils import get_model_path, get_parameters
2 changes: 1 addition & 1 deletion otx/api/usecases/exportable_code/demo/requirements.txt
@@ -1,4 +1,4 @@
 openvino==2022.3.0
 openmodelzoo-modelapi==2022.3.0
-otx @ git+https://github.com/openvinotoolkit/training_extensions/@8c11c3d42c726e6e0eda7364f00cf8ed4dbdc2e9#egg=otx
+otx @ git+https://github.com/openvinotoolkit/training_extensions/@861157a27b5e65f0cde710ee332d4aa55b2ddf45#egg=otx
 numpy>=1.21.0,<=1.23.5 # np.bool was removed in 1.24.0 which was used in openvino runtime
5 changes: 0 additions & 5 deletions otx/api/utils/__init__.py
@@ -3,8 +3,3 @@
 # Copyright (C) 2021-2022 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 #
-
-from .async_pipeline import OTXDetectionAsyncPipeline
-from .tiler import Tiler
-
-__all__ = ["Tiler", "OTXDetectionAsyncPipeline"]
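This removal pairs with the two import-site changes above: `Tiler` and `OTXDetectionAsyncPipeline` are no longer re-exported from the `otx.api.utils` package, so callers import from the defining module. Besides being explicit, this keeps `import otx.api.utils` from eagerly importing the tiler and async-pipeline modules (and whatever they depend on) as a side effect. The change at each call site:

```python
# Before: relies on the package re-export; importing otx.api.utils pulls in
# tiler/async_pipeline transitively.
from otx.api.utils import Tiler

# After: import from the module that defines the class; the package
# __init__ stays side-effect free.
from otx.api.utils.tiler import Tiler
```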
7 changes: 7 additions & 0 deletions otx/cli/utils/parser.py
@@ -143,6 +143,11 @@ def _get_leaf_node(curr_dict: Dict[str, dict], curr_key: str):
         value_type = None
         if type_hint is not None:
             value_type = type_hint.get(origin_key, {}).get("type", None)
+        # FIXME[HARIM]: There's no template in args, and it's not inside the workspace, but with --workspace,
+        # the template is not found in args, so params, which are all bools, go into str.
+        # This is a temporary solution.
+        if isinstance(value, str) and value.lower() in ("true", "false"):
+            value_type = str2bool

         leaf_node_dict, node_key = _get_leaf_node(params_dict, origin_key)
         leaf_node_dict[node_key] = {"value": value_type(value) if value_type else value}
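Context for the guard above: `bool("False")` is `True` in Python, since any non-empty string is truthy, so leaving boolean params typed as plain strings (or casting them with `bool`) would silently flip every textual `false` to `True`. Routing `"true"`/`"false"` strings through a `str2bool`-style converter avoids that. A minimal sketch of such a converter (the repo's actual `str2bool` may differ):

```python
def str2bool(value) -> bool:
    """Parse textual booleans; note bool("false") would wrongly give True."""
    if isinstance(value, bool):
        return value
    if value.lower() in ("true", "1", "yes"):
        return True
    if value.lower() in ("false", "0", "no"):
        return False
    raise ValueError(f"Cannot interpret {value!r} as a boolean")
```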
@@ -249,6 +254,8 @@ def get_parser_and_hprams_data():
         template_config = parse_model_template("./template.yaml")
         hyper_parameters = template_config.hyper_parameters.data
         parser.add_argument("template", nargs="?", default="./template.yaml", help=template_help_str)
+        # TODO: Need fix for how to get hyper_parameters when no template is given and ./template.yaml doesn't exist
+        # Ex. When using --workspace outside of a workspace, but cannot access --workspace from this function.
     else:
         parser.add_argument("template", nargs="?", default=None, help=template_help_str)

34 changes: 24 additions & 10 deletions tests/regression/action/test_action_classification.py
@@ -19,16 +19,21 @@
 )
 from tests.test_suite.e2e_test_system import e2e_pytest_component
 from tests.test_suite.run_test_command import (
-    otx_eval_compare,
-    otx_eval_e2e_eval_time,
-    otx_eval_e2e_train_time,
-    otx_eval_openvino_testing,
     otx_export_testing,
     otx_train_testing,
-    pot_eval_testing,
     pot_optimize_testing,
 )

+from tests.regression.regression_command import (
+    regression_eval_testing,
+    regression_openvino_testing,
+    regression_deployment_testing,
+    regression_nncf_eval_testing,
+    regression_pot_eval_testing,
+    regression_train_time_testing,
+    regression_eval_time_testing,
+)
+
 # Configurations for regression test.
 TASK_TYPE = "action_classification"
 TRAIN_TYPE = "supervised"
@@ -68,7 +73,7 @@ def test_otx_train(self, template, tmp_dir_path):
         train_elapsed_time = timer() - train_start_time

         infer_start_time = timer()
-        otx_eval_compare(
+        test_result = regression_eval_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -82,24 +87,29 @@
         self.performance[template.name][TIME_LOG["infer_time"]] = round(infer_elapsed_time, 3)
         result_dict[TASK_TYPE][LABEL_TYPE][TRAIN_TYPE]["train"].append(self.performance)

+        assert test_result["passed"] is True, test_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     def test_otx_train_kpi_test(self, template):
         results = result_dict[TASK_TYPE][self.label_type][TRAIN_TYPE]["train"]
         performance = get_template_performance(results, template)

-        otx_eval_e2e_train_time(
+        kpi_train_result = regression_train_time_testing(
             train_time_criteria=action_cls_regression_config["kpi_e2e_train_time_criteria"]["train"],
             e2e_train_time=performance[template.name][TIME_LOG["train_time"]],
             template=template,
         )

-        otx_eval_e2e_eval_time(
+        kpi_eval_result = regression_eval_time_testing(
             eval_time_criteria=action_cls_regression_config["kpi_e2e_eval_time_criteria"]["train"],
             e2e_eval_time=performance[template.name][TIME_LOG["infer_time"]],
             template=template,
         )

+        assert kpi_train_result["passed"] is True, kpi_train_result["log"]
+        assert kpi_eval_result["passed"] is True, kpi_eval_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     def test_otx_export_eval_openvino(self, template, tmp_dir_path):
@@ -113,7 +123,7 @@ def test_otx_export_eval_openvino(self, template, tmp_dir_path):
         export_elapsed_time = timer() - export_start_time

         export_eval_start_time = timer()
-        otx_eval_openvino_testing(
+        test_result = regression_openvino_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -129,6 +139,8 @@ def test_otx_export_eval_openvino(self, template, tmp_dir_path):
         self.performance[template.name][TIME_LOG["export_eval_time"]] = round(export_eval_elapsed_time, 3)
         result_dict[TASK_TYPE][self.label_type][TRAIN_TYPE]["export"].append(self.performance)

+        assert test_result["passed"] is True, test_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     def test_pot_optimize_eval(self, template, tmp_dir_path):
@@ -140,7 +152,7 @@ def test_pot_optimize_eval(self, template, tmp_dir_path):
         pot_elapsed_time = timer() - pot_start_time

         pot_eval_start_time = timer()
-        pot_eval_testing(
+        test_result = regression_pot_eval_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -154,3 +166,5 @@ def test_pot_optimize_eval(self, template, tmp_dir_path):
         self.performance[template.name][TIME_LOG["pot_time"]] = round(pot_elapsed_time, 3)
         self.performance[template.name][TIME_LOG["pot_eval_time"]] = round(pot_eval_elapsed_time, 3)
         result_dict[TASK_TYPE][self.label_type][TRAIN_TYPE]["pot"].append(self.performance)
+
+        assert test_result["passed"] is True, test_result["log"]
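The recurring pattern in this file (and in the detection tests below) is structural: the old `otx_eval_*`/`pot_eval_*` helpers asserted internally, so a regression aborted the test before the timing numbers were recorded in `result_dict`. The `regression_*` helpers instead appear to return a result payload, and each test asserts last, after its bookkeeping. A sketch of the contract these call sites imply (hypothetical; the real helpers live in `tests/regression/regression_command.py`):

```python
from typing import Any, Dict

def regression_eval_testing(*args: Any, **kwargs: Any) -> Dict[str, Any]:
    """Assumed shape of the new helpers: run and report, never assert."""
    passed, log = True, ""
    # ... run `otx eval`, compare metrics against the regression criteria,
    # set passed=False and fill `log` with the details on failure ...
    return {"passed": passed, "log": log}
```

Because the helper no longer raises, the performance entry is always appended, and the trailing `assert test_result["passed"] is True, test_result["log"]` still fails the test with the captured log.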
24 changes: 18 additions & 6 deletions tests/regression/action/test_action_detection.py
@@ -19,12 +19,19 @@
 )
 from tests.test_suite.e2e_test_system import e2e_pytest_component
 from tests.test_suite.run_test_command import (
-    otx_eval_compare,
-    otx_eval_e2e_eval_time,
-    otx_eval_e2e_train_time,
     otx_train_testing,
 )

+from tests.regression.regression_command import (
+    regression_eval_testing,
+    regression_openvino_testing,
+    regression_deployment_testing,
+    regression_nncf_eval_testing,
+    regression_pot_eval_testing,
+    regression_train_time_testing,
+    regression_eval_time_testing,
+)
+
 # Configurations for regression test.
 TASK_TYPE = "action_detection"
 TRAIN_TYPE = "supervised"
@@ -64,7 +71,7 @@ def test_otx_train(self, template, tmp_dir_path):
         train_elapsed_time = timer() - train_start_time

         infer_start_time = timer()
-        otx_eval_compare(
+        test_result = regression_eval_testing(
             template,
             tmp_dir_path,
             otx_dir,
@@ -78,20 +85,25 @@ def test_otx_train(self, template, tmp_dir_path):
         self.performance[template.name][TIME_LOG["infer_time"]] = round(infer_elapsed_time, 3)
         result_dict[TASK_TYPE][LABEL_TYPE][TRAIN_TYPE]["train"].append(self.performance)

+        assert test_result["passed"] is True, test_result["log"]
+
     @e2e_pytest_component
     @pytest.mark.parametrize("template", templates, ids=templates_ids)
     def test_otx_train_kpi_test(self, template):
         results = result_dict[TASK_TYPE][self.label_type][TRAIN_TYPE]["train"]
         performance = get_template_performance(results, template)

-        otx_eval_e2e_train_time(
+        kpi_train_result = regression_train_time_testing(
             train_time_criteria=action_det_regression_config["kpi_e2e_train_time_criteria"]["train"],
             e2e_train_time=performance[template.name][TIME_LOG["train_time"]],
             template=template,
         )

-        otx_eval_e2e_eval_time(
+        kpi_eval_result = regression_eval_time_testing(
             eval_time_criteria=action_det_regression_config["kpi_e2e_eval_time_criteria"]["train"],
             e2e_eval_time=performance[template.name][TIME_LOG["infer_time"]],
             template=template,
         )

+        assert kpi_train_result["passed"] is True, kpi_train_result["log"]
+        assert kpi_eval_result["passed"] is True, kpi_eval_result["log"]