From 07a6e6ce9440b692761f574eab5420e335b85869 Mon Sep 17 00:00:00 2001 From: "malik.mohrat@gmail.com" Date: Fri, 5 Jul 2024 19:51:05 +0300 Subject: [PATCH 01/24] .venv added to gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index b763c8d..a6fb039 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ Readme_files/* concept-graphs/yolov8l-world.pt concept-graphs/outputs/* assets.zip +.venv \ No newline at end of file From 847500350a7685f9e4e71d5da2621d721b0bfbb2 Mon Sep 17 00:00:00 2001 From: "malik.mohrat@gmail.com" Date: Fri, 5 Jul 2024 19:53:16 +0300 Subject: [PATCH 02/24] grad slam script update: cpu device support --- concept-graphs/conceptgraph/scripts/run_slam_rgb.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/concept-graphs/conceptgraph/scripts/run_slam_rgb.py b/concept-graphs/conceptgraph/scripts/run_slam_rgb.py index 5810f9d..b7797cd 100644 --- a/concept-graphs/conceptgraph/scripts/run_slam_rgb.py +++ b/concept-graphs/conceptgraph/scripts/run_slam_rgb.py @@ -43,6 +43,7 @@ def get_parser() -> argparse.ArgumentParser: return parser + def main(args: argparse.Namespace): if args.load_semseg: load_embeddings = True @@ -69,6 +70,7 @@ def main(args: argparse.Namespace): embedding_dir = embedding_dir, embedding_dim = embedding_dim, relative_pose=False, + device=args.device ) slam = PointFusion( @@ -103,10 +105,12 @@ def main(args: argparse.Namespace): _pose.unsqueeze(0).unsqueeze(0), embeddings = _embedding, confidence_image = _confidence, + device = args.device ) pointclouds, _ = slam.step(pointclouds, frame_cur, frame_prev) # frame_prev = frame_cur # Keep it None when we use the gt odom + torch.cuda.empty_cache() dir_to_save_map = os.path.join(args.dataset_root, args.scene_id, "rgb_cloud") From b7cd1c09217dd6e33faeb9c54681cced4be61515 Mon Sep 17 00:00:00 2001 From: "malik.mohrat@gmail.com" Date: Fri, 5 Jul 2024 23:44:23 +0300 Subject: [PATCH 03/24] dataset class update: added semantic processing --- .../conceptgraph/dataset/datasets_common.py | 73 +++++++++++++++++++ 1 file changed, 73 insertions(+) diff --git a/concept-graphs/conceptgraph/dataset/datasets_common.py b/concept-graphs/conceptgraph/dataset/datasets_common.py index 1ba1e21..fd28b6e 100644 --- a/concept-graphs/conceptgraph/dataset/datasets_common.py +++ b/concept-graphs/conceptgraph/dataset/datasets_common.py @@ -20,8 +20,10 @@ import torch import torch.nn.functional as F import yaml +from PIL import Image from natsort import natsorted from scipy.spatial.transform import Rotation as R +from torchvision.transforms.functional import pil_to_tensor from gradslam.datasets import datautils from gradslam.geometry.geometryutils import relative_transformation @@ -43,6 +45,7 @@ def as_intrinsics_matrix(intrinsics): K[1, 2] = intrinsics[3] return K + def from_intrinsics_matrix(K: torch.Tensor) -> tuple[float, float, float, float]: ''' Get fx, fy, cx, cy from the intrinsics matrix @@ -481,6 +484,71 @@ def load_poses(self): def read_embedding_from_file(self, embedding_file_path): embedding = torch.load(embedding_file_path) return embedding.permute(0, 2, 3, 1) # (1, H, W, embedding_dim) + + +class SemanticReplicaDataset(GradSLAMDataset): + def __init__( + self, + config_dict, + basedir, + sequence, + stride: Optional[int] = None, + start: Optional[int] = 0, + end: Optional[int] = -1, + desired_height: Optional[int] = 480, + desired_width: Optional[int] = 640, + load_embeddings: Optional[bool] = False, + embedding_dir: Optional[str] = "embeddings", + embedding_dim: 
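A note on the CPU-device change in patch 02 above: `torch.cuda.empty_cache()` is a no-op when CUDA was never initialized, so the per-frame flush is safe on the new CPU path, but an explicit guard documents the intent. A minimal sketch, assuming a device string as in the script:

```python
import torch

# Resolve the requested device, falling back to CPU when CUDA is absent.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Flushing the CUDA allocator between fused frames keeps peak memory low;
# on CPU there is nothing to flush, so skip the call entirely.
if device.type == "cuda":
    torch.cuda.empty_cache()
```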
Optional[int] = 512, + **kwargs, + ): + self.input_folder = os.path.join(basedir, sequence) + self.pose_path = os.path.join(self.input_folder, "traj.txt") + super().__init__( + config_dict, + stride=stride, + start=start, + end=end, + desired_height=desired_height, + desired_width=desired_width, + load_embeddings=load_embeddings, + embedding_dir=embedding_dir, + embedding_dim=embedding_dim, + **kwargs, + ) + + def get_filepaths(self): + color_paths = natsorted(glob.glob(f"{self.input_folder}/results/frame*.jpg")) + depth_paths = natsorted(glob.glob(f"{self.input_folder}/results/depth*.png")) + embedding_paths = None + if self.load_embeddings: + embedding_paths = natsorted( + glob.glob(f"{self.input_folder}/{self.embedding_dir}/*.png") + ) + return color_paths, depth_paths, embedding_paths + + def load_poses(self): + poses = [] + with open(self.pose_path, "r") as f: + lines = f.readlines() + for i in range(self.num_imgs): + line = lines[i] + # c2w = np.diag([1, -1, -1, 1]) @ np.array(list(map(float, line.split()))).reshape(4, 4) + c2w = np.array(list(map(float, line.split()))).reshape(4, 4) + # c2w[:3, 1] *= -1 + # c2w[:3, 2] *= -1 + c2w = torch.from_numpy(c2w).float() + poses.append(c2w) + return poses + + def read_embedding_from_file(self, embedding_file_path): + with torch.no_grad(): + semantic_image = Image.open(embedding_file_path) + semantic_tensor = pil_to_tensor(semantic_image) + + embedding = F.one_hot(semantic_tensor.long(), num_classes=self.embedding_dim) + + return embedding class ScannetDataset(GradSLAMDataset): @@ -607,6 +675,7 @@ def read_embedding_from_file(self, embedding_file_path): embedding = torch.load(embedding_file_path, map_location="cpu") return embedding.permute(0, 2, 3, 1) # (1, H, W, embedding_dim) + class AzureKinectDataset(GradSLAMDataset): def __init__( self, @@ -1003,6 +1072,7 @@ def load_poses(self): return poses + def load_dataset_config(path, default_path=None): """ Loads config file. 
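The `read_embedding_from_file` override above treats a per-pixel class-ID image as a one-hot "embedding" so that GradSLAM can fuse labels like any other feature channel. A self-contained sketch of the same idea (the file name and class count are placeholders):

```python
import torch
import torch.nn.functional as F
from PIL import Image
from torchvision.transforms.functional import pil_to_tensor

def load_semantic_onehot(path: str, num_classes: int) -> torch.Tensor:
    """Read a class-ID PNG and one-hot encode it to (1, H, W, num_classes)."""
    labels = pil_to_tensor(Image.open(path))       # (1, H, W), integer class IDs
    return F.one_hot(labels.long(), num_classes)   # last dim is the class axis

onehot = load_semantic_onehot("semantic_class_0.png", num_classes=512)
labels_back = onehot.argmax(dim=-1)                # recovers the original IDs
```

The one-hot form is memory-hungry (512 channels per pixel here), which is what patch 07 below works around by storing the argmax index instead.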
@@ -1087,6 +1157,7 @@ def common_dataset_to_batch(dataset): embeddings = embeddings.float() return colors, depths, intrinsics, poses, embeddings + @measure_time def get_dataset(dataconfig, basedir, sequence, **kwargs): config_dict = load_dataset_config(dataconfig) @@ -1096,6 +1167,8 @@ def get_dataset(dataconfig, basedir, sequence, **kwargs): return ReplicaDataset(config_dict, basedir, sequence, **kwargs) elif config_dict["dataset_name"].lower() in ["light"]: return ReplicaDataset(config_dict, basedir, sequence, **kwargs) + elif config_dict["dataset_name"].lower() in ["semantic_replica"]: + return SemanticReplicaDataset(config_dict, basedir, sequence, **kwargs) elif config_dict["dataset_name"].lower() in ["azure", "azurekinect"]: return AzureKinectDataset(config_dict, basedir, sequence, **kwargs) elif config_dict["dataset_name"].lower() in ["scannet"]: From 90b4834f8048a3294d09b37d61a7d77daeb5e9b2 Mon Sep 17 00:00:00 2001 From: "malik.mohrat@gmail.com" Date: Fri, 5 Jul 2024 23:54:15 +0300 Subject: [PATCH 04/24] eval scripts update: minor fixes, visual update --- .../scripts/eval_replica_semseg.py | 22 +++++++++++++++---- concept-graphs/conceptgraph/utils/eval.py | 3 ++- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/concept-graphs/conceptgraph/scripts/eval_replica_semseg.py b/concept-graphs/conceptgraph/scripts/eval_replica_semseg.py index ded41bc..4be0304 100644 --- a/concept-graphs/conceptgraph/scripts/eval_replica_semseg.py +++ b/concept-graphs/conceptgraph/scripts/eval_replica_semseg.py @@ -13,7 +13,8 @@ import open_clip -from chamferdist.chamfer import knn_points +# from chamferdist.chamfer import knn_points +from pytorch3d.ops.knn import knn_points from gradslam.structures.pointclouds import Pointclouds from conceptgraph.dataset.replica_constants import ( @@ -54,6 +55,7 @@ def get_parser(): ) return parser + def eval_replica( scene_id: str, scene_id_: str, @@ -86,8 +88,8 @@ def eval_replica( assert gt_class.min() >= 0 assert gt_class.max() < len(REPLICA_EXISTING_CLASSES) - # transform pred_xyz and gt_xyz according to the first pose in gt_poses - gt_xyz = gt_xyz @ gt_poses[0, :3, :3].t() + gt_poses[0, :3, 3] + # # transform pred_xyz and gt_xyz according to the first pose in gt_poses + # gt_xyz = gt_xyz @ gt_poses[0, :3, :3].t() + gt_poses[0, :3, 3] # Get the set of classes that are used for evaluation all_class_index = np.arange(len(class_names)) @@ -197,6 +199,7 @@ def eval_replica( # Resample the pred_xyz and pred_class based on slam_nn_in_pred pred_xyz = slam_xyz + # pred_xyz = pred_xyz @ gt_poses[0, :3, :3].t() + gt_poses[0, :3, 3] pred_class = pred_class[idx_slam_to_pred.cpu()] pred_color = pred_color[idx_slam_to_pred.cpu()] @@ -207,6 +210,16 @@ def eval_replica( # pred_pcd.colors = o3d.utility.Vector3dVector(class2color[pred_class.numpy()]) # o3d.visualization.draw_geometries([pred_pcd]) + # # GT point cloud in open3d + # print("GT pointcloud") + # gt_pcd = o3d.geometry.PointCloud() + # gt_pcd.points = o3d.utility.Vector3dVector(gt_xyz.numpy()) + # gt_pcd.colors = o3d.utility.Vector3dVector(class2color[gt_class.numpy()]) + # o3d.visualization.draw_geometries([gt_pcd]) + + # print("Merged pointcloud") + # o3d.visualization.draw_geometries([gt_pcd, pred_pcd]) + # Compute the associations between the predicted and ground truth point clouds idx_pred_to_gt, idx_gt_to_pred = compute_pred_gt_associations( pred_xyz.unsqueeze(0).cuda().contiguous().float(), @@ -233,7 +246,8 @@ def eval_replica( assert confmatrix.sum(0)[ignore_index].sum() == 0 assert 
confmatrix.sum(1)[ignore_index].sum() == 0 - '''Visualization for debugging''' + # '''Visualization for debugging''' + # print('GT point cloud in open3d') # class2color = get_random_colors(len(class_names)) # # GT point cloud in open3d diff --git a/concept-graphs/conceptgraph/utils/eval.py b/concept-graphs/conceptgraph/utils/eval.py index d76abdd..0f615ea 100644 --- a/concept-graphs/conceptgraph/utils/eval.py +++ b/concept-graphs/conceptgraph/utils/eval.py @@ -5,7 +5,8 @@ def compute_pred_gt_associations(pred, gt): # pred: predicted pointcloud # gt: GT pointcloud - from chamferdist.chamfer import knn_points + # from chamferdist.chamfer import knn_points + from pytorch3d.ops.knn import knn_points # pred = pointclouds.points_padded.cuda().contiguous() # gt = pts_gt.unsqueeze(0).cuda().contiguous() From 5bddd3f521c739f53705964612a3af9b2a9afd10 Mon Sep 17 00:00:00 2001 From: "malik.mohrat@gmail.com" Date: Fri, 5 Jul 2024 23:56:30 +0300 Subject: [PATCH 05/24] pipeline run script update: full evaluating pipeline --- export/export_concept_graphs_replica_none.sh | 137 ++++++++++++++----- 1 file changed, 104 insertions(+), 33 deletions(-) diff --git a/export/export_concept_graphs_replica_none.sh b/export/export_concept_graphs_replica_none.sh index 1f17952..065c9ef 100644 --- a/export/export_concept_graphs_replica_none.sh +++ b/export/export_concept_graphs_replica_none.sh @@ -4,42 +4,113 @@ export GSA_PATH=/tmp/Grounded-Segment-Anything export LLAVA_PYTHON_PATH=/tmp/LLaVA export LLAVA_MODEL_PATH=/assets/llm/llava-v1.5-13b export LLAVA_CKPT_PATH=/assets/llm/llava-v1.5-13b -export REPLICA_ROOT=/data/Datasets/Replica -export CG_FOLDER=/opt/src -export REPLICA_CONFIG_PATH=${CG_FOLDER}/conceptgraph/dataset/dataconfigs/replica/replica.yaml export OPENAI_API_KEY="" + +export DATASET_ROOT=/data/Datasets/Replica +# export SEMANTIC_DATASET_ROOT=/data/Datasets/replica_semantic_gt +export SEMANTIC_DATASET_ROOT=/data/Datasets/Replica_nerf + +export CG_FOLDER=/opt/src/conceptgraph/ +export DATASET_CONFIG_PATH=${CG_FOLDER}/dataset/dataconfigs/replica/replica.yaml +export SEMANTIC_DATASET_CONFIG_PATH=${CG_FOLDER}/dataset/dataconfigs/replica/replica_semantic.yaml + export SCENE_NAMES=room1 export SCENE_NAME=room1 export CLASS_SET=none export THRESHOLD=1.2 -python ${CG_FOLDER}/conceptgraph/scripts/generate_gsa_results.py \ - --dataset_root $REPLICA_ROOT \ - --dataset_config $REPLICA_CONFIG_PATH \ - --scene_id $SCENE_NAME \ - --class_set $CLASS_SET \ - --stride 5 - -python ${CG_FOLDER}/conceptgraph/slam/cfslam_pipeline_batch.py \ - dataset_root=$REPLICA_ROOT \ - dataset_config=$REPLICA_CONFIG_PATH \ - stride=5 \ - scene_id=$SCENE_NAME \ - spatial_sim_type=overlap \ - mask_conf_threshold=0.95 \ - match_method=sim_sum \ - sim_threshold=${THRESHOLD} \ - dbscan_eps=0.1 \ - gsa_variant=none \ - class_agnostic=True \ - skip_bg=True \ - max_bbox_area_ratio=0.5 \ - save_suffix=overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub \ - merge_interval=20 \ - merge_visual_sim_thresh=0.8 \ - merge_text_sim_thresh=0.8 \ - save_objects_all_frames=True - - -# python ${CG_FOLDER}/conceptgraph/scripts/animate_mapping_interactive.py --input_folder $REPLICA_ROOT/$SCENE_NAME/objects_all_frames/ -python ${CG_FOLDER}/conceptgraph/scripts/visualize_cfslam_results.py --result_path ${REPLICA_ROOT}/${SCENE_NAME}/pcd_saves/full_pcd_none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub_post.pkl.gz + +# ###### +# # ConceptGraphs uses SAM in the "segment all" mode and extract class-agnostic masks +# ###### +# python 
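Both files above swap `chamferdist.chamfer.knn_points` for `pytorch3d.ops.knn.knn_points`; the two expose compatible call patterns for this 1-nearest-neighbour use. A minimal sketch with random tensors as stand-ins for the real clouds:

```python
import torch
from pytorch3d.ops.knn import knn_points

pred = torch.rand(1, 1000, 3, device="cuda")    # (B, N, 3) predicted points
gt = torch.rand(1, 2000, 3, device="cuda")      # (B, M, 3) ground-truth points

nn = knn_points(pred, gt, K=1)                  # squared-L2 distances by default
idx_pred_to_gt = nn.idx.squeeze(0).squeeze(-1)  # (N,) nearest GT index per point
sq_dists = nn.dists.squeeze(0).squeeze(-1)      # (N,) squared distances
```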
${CG_FOLDER}/scripts/generate_gsa_results.py \ +# --dataset_root ${DATASET_ROOT} \ +# --dataset_config ${DATASET_CONFIG_PATH} \ +# --scene_id ${SCENE_NAME} \ +# --class_set ${CLASS_SET} \ +# --stride 5 + + +# ###### +# # The following command builds an object-based 3D map of the scene, using the image segmentation results from above. +# ###### +# python ${CG_FOLDER}/slam/cfslam_pipeline_batch.py \ +# dataset_root=${DATASET_ROOT} \ +# dataset_config=${DATASET_CONFIG_PATH} \ +# stride=5 \ +# scene_id=${SCENE_NAME} \ +# spatial_sim_type=overlap \ +# mask_conf_threshold=0.95 \ +# match_method=sim_sum \ +# sim_threshold=${THRESHOLD} \ +# dbscan_eps=0.1 \ +# gsa_variant=none \ +# class_agnostic=True \ +# skip_bg=True \ +# max_bbox_area_ratio=0.5 \ +# save_suffix=overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub \ +# merge_interval=20 \ +# merge_visual_sim_thresh=0.8 \ +# merge_text_sim_thresh=0.8 \ +# save_objects_all_frames=True + + +# ###### +# # If save_objects_all_frames=True was used to save the mapping results at every frame, which can be used for animated visualization by: +# ###### +# python ${CG_FOLDER}/scripts/animate_mapping_interactive.py \ +# --input_folder ${DATASET_ROOT}/${SCENE_NAME}/objects_all_frames/none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub + + +# ###### +# # If save_objects_all_frames=True was used to save the mapping results at every frame, which can be used for animated visualization by: +# ###### +# python ${CG_FOLDER}/scripts/animate_mapping_save.py \ +# --input_folder ${DATASET_ROOT}/${SCENE_NAME}/objects_all_frames/none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub + + +# ###### +# # Visualize the object-based mapping results. You can use keys b, c, r, f, i. +# ###### +# python ${CG_FOLDER}/scripts/visualize_cfslam_results.py \ +# --result_path ${DATASET_ROOT}/${SCENE_NAME}/pcd_saves/full_pcd_none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub.pkl.gz + + +# ###### +# # The following command runs a 3D RGB reconstruction (GradSLAM) of a scene and also visualize it. +# # This is useful for sanity check and evaluation. +# ###### +# python ${CG_FOLDER}/scripts/run_slam_rgb.py \ +# --dataset_root ${DATASET_ROOT} \ +# --dataset_config ${DATASET_CONFIG_PATH} \ +# --scene_id ${SCENE_NAME} \ +# --image_height 680 \ +# --image_width 1200 \ +# --stride 5 \ +# --visualize + + +# # ##### +# # The following command runs a 3D RGB reconstruction (GradSLAM) of a scene with semantic labels and also visualize it. +# # This is useful for evaluation. +# # ##### +# python ${CG_FOLDER}/scripts/run_slam_rgb.py \ +# --dataset_root ${SEMANTIC_DATASET_ROOT} \ +# --dataset_config ${SEMANTIC_DATASET_CONFIG_PATH} \ +# --scene_id 'room_1' \ +# --image_height 480 \ +# --image_width 640 \ +# --stride 15 \ +# --visualize \ +# --load_semseg + + +# ###### +# # Then run the following command to evaluate the semantic segmentation results. 
+# ###### +# python ${CG_FOLDER}/scripts/eval_replica_semseg.py \ +# --replica_root ${DATASET_ROOT} \ +# --replica_semantic_root ${SEMANTIC_DATASET_ROOT} \ +# --n_exclude 6 \ +# --pred_exp_name none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub # \ +# # --device "cpu" \ No newline at end of file From 4b821c157d5ef1bc080d057dc8b1ad8ca8a48eb2 Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Sat, 20 Jul 2024 00:12:44 +0300 Subject: [PATCH 06/24] replica cad dataset support was added --- .../dataconfigs/replica/replica_cad.yaml | 33 ++++ .../replica/replica_cad_semantic.yaml | 33 ++++ .../dataconfigs/replica/replica_semantic.yaml | 33 ++++ .../conceptgraph/dataset/datasets_common.py | 75 ++++++++- .../conceptgraph/scripts/run_slam_rgb.py | 2 +- .../export_concept_graphs_replica_cad_none.sh | 146 +++++++++++++----- 6 files changed, 283 insertions(+), 39 deletions(-) create mode 100644 concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad.yaml create mode 100644 concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad_semantic.yaml create mode 100644 concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_semantic.yaml diff --git a/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad.yaml b/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad.yaml new file mode 100644 index 0000000..6b4c613 --- /dev/null +++ b/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad.yaml @@ -0,0 +1,33 @@ +dataset_name: 'replica_cad' +meshing: + eval_rec: True +tracking: + vis_freq: 50 + vis_inside_freq: 25 + ignore_edge_W: 100 + ignore_edge_H: 100 + seperate_LR: False + const_speed_assumption: True + lr: 0.001 + pixels: 200 + iters: 10 +mapping: + every_frame: 5 + vis_freq: 50 + vis_inside_freq: 30 + mesh_freq: 50 + ckpt_freq: 500 + keyframe_every: 50 + mapping_window_size: 5 + pixels: 1000 + iters_first: 1500 + iters: 60 +camera_params: + image_height: 680 + image_width: 1200 + fx: 600.0 + fy: 600.0 + cx: 599.5 + cy: 339.5 + png_depth_scale: 6553.5 #for depth image in png format + crop_edge: 0 \ No newline at end of file diff --git a/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad_semantic.yaml b/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad_semantic.yaml new file mode 100644 index 0000000..ce2a3f7 --- /dev/null +++ b/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad_semantic.yaml @@ -0,0 +1,33 @@ +dataset_name: 'semantic_replica' +meshing: + eval_rec: True +tracking: + vis_freq: 50 + vis_inside_freq: 25 + ignore_edge_W: 100 + ignore_edge_H: 100 + seperate_LR: False + const_speed_assumption: True + lr: 0.001 + pixels: 200 + iters: 10 +mapping: + every_frame: 5 + vis_freq: 50 + vis_inside_freq: 30 + mesh_freq: 50 + ckpt_freq: 500 + keyframe_every: 50 + mapping_window_size: 5 + pixels: 1000 + iters_first: 1500 + iters: 60 +camera_params: + image_height: 480 + image_width: 640 + fx: 320.0 + fy: 320.0 + cx: 319.5 + cy: 239.5 + png_depth_scale: 1000 # for depth image in png format + crop_edge: 0 \ No newline at end of file diff --git a/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_semantic.yaml b/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_semantic.yaml new file mode 100644 index 0000000..ce2a3f7 --- /dev/null +++ b/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_semantic.yaml @@ -0,0 +1,33 @@ +dataset_name: 'semantic_replica' +meshing: + eval_rec: True +tracking: + vis_freq: 50 + vis_inside_freq: 25 + 
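For reference, each `camera_params` block above maps directly onto a pinhole intrinsics matrix (the repository's `as_intrinsics_matrix` builds the same thing). A sketch with the Replica-style values:

```python
import numpy as np

# Pinhole intrinsics from the 1200x680 camera_params block above.
fx, fy, cx, cy = 600.0, 600.0, 599.5, 339.5
K = np.array([[fx, 0.0, cx],
              [0.0, fy, cy],
              [0.0, 0.0, 1.0]])

# png_depth_scale converts raw 16-bit PNG depth readings to metres.
depth_metres = 13107 / 6553.5   # a raw reading of 13107 -> 2.0 m
```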
ignore_edge_W: 100 + ignore_edge_H: 100 + seperate_LR: False + const_speed_assumption: True + lr: 0.001 + pixels: 200 + iters: 10 +mapping: + every_frame: 5 + vis_freq: 50 + vis_inside_freq: 30 + mesh_freq: 50 + ckpt_freq: 500 + keyframe_every: 50 + mapping_window_size: 5 + pixels: 1000 + iters_first: 1500 + iters: 60 +camera_params: + image_height: 480 + image_width: 640 + fx: 320.0 + fy: 320.0 + cx: 319.5 + cy: 239.5 + png_depth_scale: 1000 # for depth image in png format + crop_edge: 0 \ No newline at end of file diff --git a/concept-graphs/conceptgraph/dataset/datasets_common.py b/concept-graphs/conceptgraph/dataset/datasets_common.py index fd28b6e..9229173 100644 --- a/concept-graphs/conceptgraph/dataset/datasets_common.py +++ b/concept-graphs/conceptgraph/dataset/datasets_common.py @@ -498,7 +498,73 @@ def __init__( desired_height: Optional[int] = 480, desired_width: Optional[int] = 640, load_embeddings: Optional[bool] = False, - embedding_dir: Optional[str] = "embeddings", + embedding_dir: Optional[str] = "semantic_class", + embedding_dim: Optional[int] = 512, + **kwargs, + ): + self.input_folder = os.path.join(basedir, sequence) + self.pose_path = os.path.join(self.input_folder, "Sequence_1/traj_w_c.txt") + super().__init__( + config_dict, + stride=stride, + start=start, + end=end, + desired_height=desired_height, + desired_width=desired_width, + load_embeddings=load_embeddings, + embedding_dir=embedding_dir, + embedding_dim=embedding_dim, + **kwargs, + ) + + def get_filepaths(self): + color_paths = natsorted(glob.glob(f"{self.input_folder}/Sequence_1/rgb/rgb*.png")) + depth_paths = natsorted(glob.glob(f"{self.input_folder}/Sequence_1/depth/depth*.png")) + embedding_paths = None + if self.load_embeddings: + embedding_paths = natsorted( + glob.glob(f"{self.input_folder}/Sequence_1/{self.embedding_dir}/semantic_class*.png") + ) + + return color_paths, depth_paths, embedding_paths + + def load_poses(self): + poses = [] + with open(self.pose_path, "r") as f: + lines = f.readlines() + for i in range(self.num_imgs): + line = lines[i] + # c2w = np.diag([1, -1, -1, 1]) @ np.array(list(map(float, line.split()))).reshape(4, 4) + c2w = np.array(list(map(float, line.split()))).reshape(4, 4) + # c2w[:3, 1] *= -1 + # c2w[:3, 2] *= -1 + c2w = torch.from_numpy(c2w).float() + poses.append(c2w) + return poses + + def read_embedding_from_file(self, embedding_file_path): + with torch.no_grad(): + semantic_image = Image.open(embedding_file_path) + semantic_tensor = pil_to_tensor(semantic_image) + + embedding = F.one_hot(semantic_tensor.long(), num_classes=self.embedding_dim) + + return embedding + + +class ReplicaCADDataset(GradSLAMDataset): + def __init__( + self, + config_dict, + basedir, + sequence, + stride: Optional[int] = None, + start: Optional[int] = 0, + end: Optional[int] = -1, + desired_height: Optional[int] = 480, + desired_width: Optional[int] = 640, + load_embeddings: Optional[bool] = False, + embedding_dir: Optional[str] = "semantic_class", embedding_dim: Optional[int] = 512, **kwargs, ): @@ -523,8 +589,9 @@ def get_filepaths(self): embedding_paths = None if self.load_embeddings: embedding_paths = natsorted( - glob.glob(f"{self.input_folder}/{self.embedding_dir}/*.png") + glob.glob(f"{self.input_folder}/results/semantic*.png") ) + return color_paths, depth_paths, embedding_paths def load_poses(self): @@ -543,7 +610,7 @@ def load_poses(self): def read_embedding_from_file(self, embedding_file_path): with torch.no_grad(): - semantic_image = Image.open(embedding_file_path) + semantic_image = 
Image.open(embedding_file_path).convert('I') semantic_tensor = pil_to_tensor(semantic_image) embedding = F.one_hot(semantic_tensor.long(), num_classes=self.embedding_dim) @@ -1169,6 +1236,8 @@ def get_dataset(dataconfig, basedir, sequence, **kwargs): return ReplicaDataset(config_dict, basedir, sequence, **kwargs) elif config_dict["dataset_name"].lower() in ["semantic_replica"]: return SemanticReplicaDataset(config_dict, basedir, sequence, **kwargs) + elif config_dict["dataset_name"].lower() in ["replica_cad"]: + return ReplicaCADDataset(config_dict, basedir, sequence, **kwargs) elif config_dict["dataset_name"].lower() in ["azure", "azurekinect"]: return AzureKinectDataset(config_dict, basedir, sequence, **kwargs) elif config_dict["dataset_name"].lower() in ["scannet"]: diff --git a/concept-graphs/conceptgraph/scripts/run_slam_rgb.py b/concept-graphs/conceptgraph/scripts/run_slam_rgb.py index b7797cd..1be1551 100644 --- a/concept-graphs/conceptgraph/scripts/run_slam_rgb.py +++ b/concept-graphs/conceptgraph/scripts/run_slam_rgb.py @@ -67,7 +67,7 @@ def main(args: argparse.Namespace): end=args.end, stride=args.stride, load_embeddings=load_embeddings, - embedding_dir = embedding_dir, + # embedding_dir = embedding_dir, embedding_dim = embedding_dim, relative_pose=False, device=args.device diff --git a/export/export_concept_graphs_replica_cad_none.sh b/export/export_concept_graphs_replica_cad_none.sh index 2a5250e..19af57a 100644 --- a/export/export_concept_graphs_replica_cad_none.sh +++ b/export/export_concept_graphs_replica_cad_none.sh @@ -4,42 +4,118 @@ export GSA_PATH=/tmp/Grounded-Segment-Anything export LLAVA_PYTHON_PATH=/tmp/LLaVA export LLAVA_MODEL_PATH=/assets/llm/llava-v1.5-13b export LLAVA_CKPT_PATH=/assets/llm/llava-v1.5-13b -export REPLICA_ROOT=/data/generated/replica_cad/ -export CG_FOLDER=/opt/src -export REPLICA_CONFIG_PATH=${CG_FOLDER}/conceptgraph/dataset/dataconfigs/replica/replica.yaml export OPENAI_API_KEY="" -export SCENE_NAMES="v3_sc0_staging_00/default_lights_0/" -export SCENE_NAME="v3_sc0_staging_00/default_lights_0/" + +export DATASET_ROOT=/data/Datasets/generated/replica_cad +# export SEMANTIC_DATASET_ROOT=/data/Datasets/replica_semantic_gt +export SEMANTIC_DATASET_ROOT=/data/Datasets/generated/replica_cad_semantic + +export CG_FOLDER=/opt/src/conceptgraph/ +export DATASET_CONFIG_PATH=${CG_FOLDER}/dataset/dataconfigs/replica/replica.yaml +export SEMANTIC_DATASET_CONFIG_PATH=${CG_FOLDER}/dataset/dataconfigs/replica/replica_cad.yaml + +# export SCENE_NAMES=("v3_sc3_staging_03/test") +export SCENE_NAMES=("v3_sc0_staging_00/default_lights_0") +# export SCENE_NAMES=(office2) # room2 +# export SCENE_NAME=room1 export CLASS_SET=none export THRESHOLD=1.2 -python ${CG_FOLDER}/conceptgraph/scripts/generate_gsa_results.py \ - --dataset_root $REPLICA_ROOT \ - --dataset_config $REPLICA_CONFIG_PATH \ - --scene_id $SCENE_NAME \ - --class_set $CLASS_SET \ - --stride 5 - -python ${CG_FOLDER}/conceptgraph/slam/cfslam_pipeline_batch.py \ - dataset_root=$REPLICA_ROOT \ - dataset_config=$REPLICA_CONFIG_PATH \ - stride=5 \ - scene_id=$SCENE_NAME \ - spatial_sim_type=overlap \ - mask_conf_threshold=0.95 \ - match_method=sim_sum \ - sim_threshold=${THRESHOLD} \ - dbscan_eps=0.1 \ - gsa_variant=none \ - class_agnostic=True \ - skip_bg=True \ - max_bbox_area_ratio=0.5 \ - save_suffix=overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub \ - merge_interval=20 \ - merge_visual_sim_thresh=0.8 \ - merge_text_sim_thresh=0.8 \ - save_objects_all_frames=True - - -# python 
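The `.convert('I')` added above is the key fix for datasets with many classes: PIL mode 'I' is 32-bit signed integer, so label images whose IDs exceed 255 are not truncated to uint8 on load. A small sketch (file name is a placeholder):

```python
import torch
from PIL import Image
from torchvision.transforms.functional import pil_to_tensor

img = Image.open("semantic_000000.png").convert("I")  # 32-bit integer mode
labels = pil_to_tensor(img)                           # (1, H, W), torch.int32
assert labels.dtype == torch.int32                    # IDs > 255 survive intact
```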
${CG_FOLDER}/conceptgraph/scripts/animate_mapping_interactive.py --input_folder $REPLICA_ROOT/$SCENE_NAME/objects_all_frames/ -python ${CG_FOLDER}/conceptgraph/scripts/visualize_cfslam_results.py --result_path ${REPLICA_ROOT}/${SCENE_NAME}/pcd_saves/full_pcd_none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub_post.pkl.gz + +for SCENE_NAME in ${SCENE_NAMES[*]} +do + printf "Running scene: %s\n" $SCENE_NAME + + # ###### + # # ConceptGraphs uses SAM in the "segment all" mode and extract class-agnostic masks. + # ###### + # python ${CG_FOLDER}/scripts/generate_gsa_results.py \ + # --dataset_root ${DATASET_ROOT} \ + # --dataset_config ${DATASET_CONFIG_PATH} \ + # --scene_id ${SCENE_NAME} \ + # --class_set ${CLASS_SET} \ + # --stride 5 + + + # # ###### + # # The following command builds an object-based 3D map of the scene, using the image segmentation results from above. + # ###### + # python ${CG_FOLDER}/slam/cfslam_pipeline_batch.py \ + # dataset_root=${DATASET_ROOT} \ + # dataset_config=${DATASET_CONFIG_PATH} \ + # stride=5 \ + # scene_id=${SCENE_NAME} \ + # spatial_sim_type=overlap \ + # mask_conf_threshold=0.95 \ + # match_method=sim_sum \ + # sim_threshold=${THRESHOLD} \ + # dbscan_eps=0.1 \ + # gsa_variant=none \ + # class_agnostic=True \ + # skip_bg=True \ + # max_bbox_area_ratio=0.5 \ + # save_suffix=overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub \ + # merge_interval=20 \ + # merge_visual_sim_thresh=0.8 \ + # merge_text_sim_thresh=0.8 \ + # save_objects_all_frames=True + + + # ###### + # # If save_objects_all_frames=True was used to save the mapping results at every frame, which can be used for animated visualization by: + # ###### + # python ${CG_FOLDER}/scripts/animate_mapping_interactive.py \ + # --input_folder ${DATASET_ROOT}/${SCENE_NAME}/objects_all_frames/none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub + + + # ###### + # # If save_objects_all_frames=True was used to save the mapping results at every frame, which can be used for animated visualization by: + # ###### + # python ${CG_FOLDER}/scripts/animate_mapping_save.py \ + # --input_folder ${DATASET_ROOT}/${SCENE_NAME}/objects_all_frames/none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub + + + # ###### + # # Visualize the object-based mapping results. You can use keys b, c, r, f, i. + # ###### + # python ${CG_FOLDER}/scripts/visualize_cfslam_results.py \ + # --result_path ${DATASET_ROOT}/${SCENE_NAME}/pcd_saves/full_pcd_none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub.pkl.gz + + + # ###### + # # The following command runs a 3D RGB reconstruction (GradSLAM) of a scene and also visualize (--visualize) it. + # # This is useful for sanity check and evaluation. + # ###### + # python ${CG_FOLDER}/scripts/run_slam_rgb.py \ + # --dataset_root ${DATASET_ROOT} \ + # --dataset_config ${DATASET_CONFIG_PATH} \ + # --scene_id ${SCENE_NAME} \ + # --image_height 680 \ + # --image_width 1200 \ + # --stride 5 + + + # ##### + # The following command runs a 3D RGB reconstruction (GradSLAM) of a scene with semantic labels and also visualize (--visualize) it. + # This is useful for evaluation. + # ##### + python ${CG_FOLDER}/scripts/run_slam_rgb.py \ + --dataset_root ${SEMANTIC_DATASET_ROOT} \ + --dataset_config ${SEMANTIC_DATASET_CONFIG_PATH} \ + --scene_id ${SCENE_NAME} \ + --image_height 480 \ + --image_width 640 \ + --stride 15 \ + --load_semseg + +done + +# ###### +# # Then run the following command to evaluate the semantic segmentation results. 
+# ###### +# python ${CG_FOLDER}/scripts/eval_replica_semseg.py \ +# --replica_root ${DATASET_ROOT} \ +# --replica_semantic_root ${SEMANTIC_DATASET_ROOT} \ +# --n_exclude 6 \ +# --pred_exp_name none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub # \ +# # --device "cpu" \ No newline at end of file From fb62294da48a06123997919e0ad0245757e08a02 Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Thu, 15 Aug 2024 15:19:25 +0300 Subject: [PATCH 07/24] index embedding in poincloud [update is needed] --- .../conceptgraph/scripts/run_slam_rgb.py | 38 +++++++++++++++++-- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/concept-graphs/conceptgraph/scripts/run_slam_rgb.py b/concept-graphs/conceptgraph/scripts/run_slam_rgb.py index 1be1551..1a636bd 100644 --- a/concept-graphs/conceptgraph/scripts/run_slam_rgb.py +++ b/concept-graphs/conceptgraph/scripts/run_slam_rgb.py @@ -74,9 +74,12 @@ def main(args: argparse.Namespace): ) slam = PointFusion( - odom="gt", + odom="gt", + dist_th=1e-10, + sigma=1e+10, dsratio=1, - device=args.device + device=args.device, + # embedding_fusion_method="bayes" ) frame_cur, frame_prev = None, None @@ -84,11 +87,27 @@ def main(args: argparse.Namespace): device=args.device ) + # if load_embeddings: + # unique_classes = torch.tensor([]).to(args.device) + + # for idx in trange(len(dataset)): + # _color, _depth, intrinsics, _pose, _embedding = dataset[idx] + # _embedding = _embedding.argmax(dim=-1) + + # unique_classes = torch.unique(torch.cat((unique_classes, torch.unique(_embedding)))) + + # exist2all = unique_classes.sort()[0] + for idx in trange(len(dataset)): if load_embeddings: _color, _depth, intrinsics, _pose, _embedding = dataset[idx] _embedding = _embedding.unsqueeze(0).half() + _embedding = _embedding.argmax(dim=-1, keepdims=True).half() _confidence = torch.ones_like(_embedding) + + # print(_embedding, _embedding.dtype) + # # print(_embedding.shape, _embedding.dtype) + # print(_embedding.unique()) else: _color, _depth, intrinsics, _pose = dataset[idx] _embedding = None @@ -104,15 +123,26 @@ def main(args: argparse.Namespace): intrinsics.unsqueeze(0).unsqueeze(0), _pose.unsqueeze(0).unsqueeze(0), embeddings = _embedding, - confidence_image = _confidence, + # confidence_image = _confidence, device = args.device ) - pointclouds, _ = slam.step(pointclouds, frame_cur, frame_prev) + pointclouds, _ = slam.step(pointclouds, frame_cur, frame_prev) #, inplace=True) # frame_prev = frame_cur # Keep it None when we use the gt odom + + # print(pointclouds.embeddings_list[0].unique()) torch.cuda.empty_cache() + # print(pointclouds.embeddings_list[0].unique()) + # print(pointclouds.confidences_list[0].unique()) + + # embeddings = pointclouds.embeddings_list[0].round().long() + # print(embeddings, embeddings.shape) + # pointclouds.embeddings_list[0] = torch.nn.functional.one_hot(embeddings, num_classes=embedding_dim).half() + + # print(pointclouds.embeddings_list[0]) + dir_to_save_map = os.path.join(args.dataset_root, args.scene_id, "rgb_cloud") print(f"Saving the map to {dir_to_save_map}") os.makedirs(dir_to_save_map, exist_ok=True) From 2124321f32ef1dd69b77dfb5669dfe17d5659da2 Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Wed, 11 Sep 2024 11:59:14 +0300 Subject: [PATCH 08/24] Old export bash scripts deleting --- export/export_concept_graphs_gibson_ram.sh | 46 -------------------- export/export_concept_graphs_light_none.sh | 45 -------------------- export/export_concept_graphs_mp3d_none.sh | 45 -------------------- 
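On the patch-07 change above (which the author flags as "[update is needed]"): storing the argmax class index as a single-channel embedding shrinks the map by a factor of the class count, but index values must never be blended during fusion (a chair, ID 3, averaged with a table, ID 7, is not class 5), which is presumably why `dist_th` and `sigma` are pushed to extremes that effectively disable point merging. A sketch of the trade-off:

```python
import torch
import torch.nn.functional as F

num_classes = 102                                    # placeholder class count
labels = torch.randint(0, num_classes, (1, 480, 640))

onehot = F.one_hot(labels, num_classes).half()       # (1, H, W, C), safe to average
index = onehot.argmax(dim=-1, keepdim=True).half()   # (1, H, W, 1), must not be averaged

print(onehot.numel() // index.numel())               # C-fold fewer stored values
```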
export/export_concept_graphs_mp3d_ram.sh | 45 -------------------- export/export_concept_graphs_replica_ram.sh | 47 --------------------- 5 files changed, 228 deletions(-) delete mode 100644 export/export_concept_graphs_gibson_ram.sh delete mode 100644 export/export_concept_graphs_light_none.sh delete mode 100644 export/export_concept_graphs_mp3d_none.sh delete mode 100644 export/export_concept_graphs_mp3d_ram.sh delete mode 100644 export/export_concept_graphs_replica_ram.sh diff --git a/export/export_concept_graphs_gibson_ram.sh b/export/export_concept_graphs_gibson_ram.sh deleted file mode 100644 index 8c2be96..0000000 --- a/export/export_concept_graphs_gibson_ram.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/bash - -export GSA_PATH=/tmp/Grounded-Segment-Anything -export LLAVA_PYTHON_PATH=/tmp/LLaVA -export LLAVA_MODEL_PATH=/assets/llm/llava-v1.5-13b -export LLAVA_CKPT_PATH=/assets/llm/llava-v1.5-13b -export REPLICA_ROOT=/data/Datasets/Gibson -export CG_FOLDER=/opt/src -export REPLICA_CONFIG_PATH=${CG_FOLDER}/conceptgraph/dataset/dataconfigs/light/light.yaml -export OPENAI_API_KEY="" -export SCENE_NAMES=Adrian -export SCENE_NAME=Adrian -export CLASS_SET=ram -export THRESHOLD=1.2 - -# CLASS_SET=ram -# python ${CG_FOLDER}/conceptgraph/scripts/generate_gsa_results.py \ -# --dataset_root $REPLICA_ROOT \ -# --dataset_config $REPLICA_CONFIG_PATH \ -# --scene_id $SCENE_NAME \ -# --class_set $CLASS_SET \ -# --box_threshold 0.2 \ -# --text_threshold 0.2 \ -# --stride 5 \ -# --add_bg_classes \ -# --accumu_classes \ -# --exp_suffix withbg_allclasses - -# python ${CG_FOLDER}/conceptgraph/slam/cfslam_pipeline_batch.py \ -# dataset_root=$REPLICA_ROOT \ -# dataset_config=$REPLICA_CONFIG_PATH \ -# stride=5 \ -# scene_id=$SCENE_NAME \ -# spatial_sim_type=overlap \ -# mask_conf_threshold=0.25 \ -# match_method=sim_sum \ -# sim_threshold=${THRESHOLD} \ -# dbscan_eps=0.1 \ -# gsa_variant=ram_withbg_allclasses \ -# skip_bg=False \ -# max_bbox_area_ratio=0.5 \ -# save_suffix=overlap_maskconf0.25_simsum${THRESHOLD}_dbscan.1 \ -# save_objects_all_frames=True - - -python ${CG_FOLDER}/conceptgraph/scripts/visualize_cfslam_results.py --result_path ${REPLICA_ROOT}/${SCENE_NAME}/pcd_saves/full_pcd_ram_withbg_allclasses_overlap_maskconf0.25_simsum${THRESHOLD}_dbscan.1_post.pkl.gz diff --git a/export/export_concept_graphs_light_none.sh b/export/export_concept_graphs_light_none.sh deleted file mode 100644 index 69c2d65..0000000 --- a/export/export_concept_graphs_light_none.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash - -export GSA_PATH=/tmp/Grounded-Segment-Anything -export LLAVA_PYTHON_PATH=/tmp/LLaVA -export LLAVA_MODEL_PATH=/assets/llm/llava-v1.5-13b -export LLAVA_CKPT_PATH=/assets/llm/llava-v1.5-13b -export REPLICA_ROOT=/data/Datasets/Light -export CG_FOLDER=/opt/src -export REPLICA_CONFIG_PATH=${CG_FOLDER}/conceptgraph/dataset/dataconfigs/light/light.yaml -export OPENAI_API_KEY="" -export SCENE_NAMES=higher_freq_small -export SCENE_NAME=higher_freq_small -export CLASS_SET=none -export THRESHOLD=1.2 - -python ${CG_FOLDER}/conceptgraph/scripts/generate_gsa_results.py \ - --dataset_root $REPLICA_ROOT \ - --dataset_config $REPLICA_CONFIG_PATH \ - --scene_id $SCENE_NAME \ - --class_set $CLASS_SET \ - --stride 5 - -python ${CG_FOLDER}/conceptgraph/slam/cfslam_pipeline_batch.py \ - dataset_root=$REPLICA_ROOT \ - dataset_config=$REPLICA_CONFIG_PATH \ - stride=5 \ - scene_id=$SCENE_NAME \ - spatial_sim_type=overlap \ - mask_conf_threshold=0.95 \ - match_method=sim_sum \ - sim_threshold=${THRESHOLD} \ - dbscan_eps=0.1 \ - 
gsa_variant=none \ - class_agnostic=True \ - skip_bg=True \ - max_bbox_area_ratio=0.5 \ - save_suffix=overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub \ - merge_interval=20 \ - merge_visual_sim_thresh=0.8 \ - merge_text_sim_thresh=0.8 \ - save_objects_all_frames=True - - -# python ${CG_FOLDER}/conceptgraph/scripts/animate_mapping_interactive.py --input_folder $REPLICA_ROOT/$SCENE_NAME/objects_all_frames/ -python ${CG_FOLDER}/conceptgraph/scripts/visualize_cfslam_results.py --result_path ${REPLICA_ROOT}/${SCENE_NAME}/pcd_saves/full_pcd_none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub_post.pkl.gz diff --git a/export/export_concept_graphs_mp3d_none.sh b/export/export_concept_graphs_mp3d_none.sh deleted file mode 100644 index 5a37c6d..0000000 --- a/export/export_concept_graphs_mp3d_none.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash - -export GSA_PATH=/tmp/Grounded-Segment-Anything -export LLAVA_PYTHON_PATH=/tmp/LLaVA -export LLAVA_MODEL_PATH=/assets/llm/llava-v1.5-13b -export LLAVA_CKPT_PATH=/assets/llm/llava-v1.5-13b -export REPLICA_ROOT=/data/generated/hm3d_minival/ -export CG_FOLDER=/opt/src -export REPLICA_CONFIG_PATH=${CG_FOLDER}/conceptgraph/dataset/dataconfigs/light/light.yaml -export OPENAI_API_KEY="" -export SCENE_NAMES="00800-TEEsavR23oF" -export SCENE_NAME="00800-TEEsavR23oF" -export CLASS_SET=none -export THRESHOLD=1.2 - -python ${CG_FOLDER}/conceptgraph/scripts/generate_gsa_results.py \ - --dataset_root $REPLICA_ROOT \ - --dataset_config $REPLICA_CONFIG_PATH \ - --scene_id $SCENE_NAME \ - --class_set $CLASS_SET \ - --stride 5 - -python ${CG_FOLDER}/conceptgraph/slam/cfslam_pipeline_batch.py \ - dataset_root=$REPLICA_ROOT \ - dataset_config=$REPLICA_CONFIG_PATH \ - stride=5 \ - scene_id=$SCENE_NAME \ - spatial_sim_type=overlap \ - mask_conf_threshold=0.95 \ - match_method=sim_sum \ - sim_threshold=${THRESHOLD} \ - dbscan_eps=0.1 \ - gsa_variant=none \ - class_agnostic=True \ - skip_bg=True \ - max_bbox_area_ratio=0.5 \ - save_suffix=overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub \ - merge_interval=20 \ - merge_visual_sim_thresh=0.8 \ - merge_text_sim_thresh=0.8 \ - save_objects_all_frames=True - - -# python ${CG_FOLDER}/conceptgraph/scripts/animate_mapping_interactive.py --input_folder $REPLICA_ROOT/$SCENE_NAME/objects_all_frames/ -python ${CG_FOLDER}/conceptgraph/scripts/visualize_cfslam_results.py --result_path ${REPLICA_ROOT}/${SCENE_NAME}/pcd_saves/full_pcd_none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub_post.pkl.gz diff --git a/export/export_concept_graphs_mp3d_ram.sh b/export/export_concept_graphs_mp3d_ram.sh deleted file mode 100644 index 657a879..0000000 --- a/export/export_concept_graphs_mp3d_ram.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash - -export GSA_PATH=/tmp/Grounded-Segment-Anything -export LLAVA_PYTHON_PATH=/tmp/LLaVA -export LLAVA_MODEL_PATH=/assets/llm/llava-v1.5-13b -export LLAVA_CKPT_PATH=/assets/llm/llava-v1.5-13b -export REPLICA_ROOT=/data/generated/hm3d_minival/ -export CG_FOLDER=/opt/src -export REPLICA_CONFIG_PATH=${CG_FOLDER}/conceptgraph/dataset/dataconfigs/light/light.yaml -export OPENAI_API_KEY="" -export SCENE_NAMES="00800-TEEsavR23oF" -export SCENE_NAME="00800-TEEsavR23oF" -export CLASS_SET=ram -export THRESHOLD=1.2 - -python ${CG_FOLDER}/conceptgraph/scripts/generate_gsa_results.py \ - --dataset_root $REPLICA_ROOT \ - --dataset_config $REPLICA_CONFIG_PATH \ - --scene_id $SCENE_NAME \ - --class_set $CLASS_SET \ - --stride 5 - -python 
${CG_FOLDER}/conceptgraph/slam/cfslam_pipeline_batch.py \ - dataset_root=$REPLICA_ROOT \ - dataset_config=$REPLICA_CONFIG_PATH \ - stride=5 \ - scene_id=$SCENE_NAME \ - spatial_sim_type=overlap \ - mask_conf_threshold=0.95 \ - match_method=sim_sum \ - sim_threshold=${THRESHOLD} \ - dbscan_eps=0.1 \ - gsa_variant=none \ - class_agnostic=True \ - skip_bg=True \ - max_bbox_area_ratio=0.5 \ - save_suffix=overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub \ - merge_interval=20 \ - merge_visual_sim_thresh=0.8 \ - merge_text_sim_thresh=0.8 \ - save_objects_all_frames=True - - -# python ${CG_FOLDER}/conceptgraph/scripts/animate_mapping_interactive.py --input_folder $REPLICA_ROOT/$SCENE_NAME/objects_all_frames/ -python ${CG_FOLDER}/conceptgraph/scripts/visualize_cfslam_results.py --result_path ${REPLICA_ROOT}/${SCENE_NAME}/pcd_saves/full_pcd_none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub_post.pkl.gz diff --git a/export/export_concept_graphs_replica_ram.sh b/export/export_concept_graphs_replica_ram.sh deleted file mode 100644 index ea79e54..0000000 --- a/export/export_concept_graphs_replica_ram.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -export GSA_PATH=/tmp/Grounded-Segment-Anything -export LLAVA_PYTHON_PATH=/tmp/LLaVA -export LLAVA_MODEL_PATH=/assets/llm/llava-v1.5-13b -export LLAVA_CKPT_PATH=/assets/llm/llava-v1.5-13b -export REPLICA_ROOT=/data/Datasets/Replica -export CG_FOLDER=/opt/src -export REPLICA_CONFIG_PATH=${CG_FOLDER}/conceptgraph/dataset/dataconfigs/replica/replica.yaml -export OPENAI_API_KEY="" -export SCENE_NAMES=room2 -export SCENE_NAME=room2 -export CLASS_SET=ram -export THRESHOLD=1.2 - -CLASS_SET=ram -python ${CG_FOLDER}/conceptgraph/scripts/generate_gsa_results.py \ - --dataset_root $REPLICA_ROOT \ - --dataset_config $REPLICA_CONFIG_PATH \ - --scene_id $SCENE_NAME \ - --class_set $CLASS_SET \ - --box_threshold 0.2 \ - --text_threshold 0.2 \ - --stride 5 \ - --add_bg_classes \ - --accumu_classes \ - --exp_suffix withbg_allclasses - -python ${CG_FOLDER}/conceptgraph/slam/cfslam_pipeline_batch.py \ - dataset_root=$REPLICA_ROOT \ - dataset_config=$REPLICA_CONFIG_PATH \ - stride=5 \ - scene_id=$SCENE_NAME \ - spatial_sim_type=overlap \ - mask_conf_threshold=0.25 \ - match_method=sim_sum \ - sim_threshold=${THRESHOLD} \ - dbscan_eps=0.1 \ - gsa_variant=ram_withbg_allclasses \ - skip_bg=False \ - max_bbox_area_ratio=0.5 \ - save_suffix=overlap_maskconf0.25_simsum${THRESHOLD}_dbscan.1 \ - save_objects_all_frames=True - - -# python ${CG_FOLDER}/conceptgraph/scripts/animate_mapping_interactive.py --input_folder $REPLICA_ROOT/$SCENE_NAME/objects_all_frames/ -# python ${CG_FOLDER}/conceptgraph/scripts/visualize_cfslam_results.py --result_path ${REPLICA_ROOT}/${SCENE_NAME}/pcd_saves/full_pcd_none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub_post.pkl.gz From 1c2de349c94a9f591c681a9e89e22d3e258c9404 Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Wed, 11 Sep 2024 12:03:51 +0300 Subject: [PATCH 09/24] dataconfigs update --- .../dataset/dataconfigs/replica/replica.yaml | 11 +---------- .../dataset/dataconfigs/replica/replica_cad.yaml | 11 +---------- .../dataconfigs/replica/replica_cad_semantic.yaml | 13 ++----------- 3 files changed, 4 insertions(+), 31 deletions(-) diff --git a/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica.yaml b/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica.yaml index 9c53875..ed36832 100755 --- a/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica.yaml +++ 
b/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica.yaml @@ -21,13 +21,4 @@ mapping: mapping_window_size: 5 pixels: 1000 iters_first: 1500 - iters: 60 -camera_params: - image_height: 680 - image_width: 1200 - fx: 600.0 - fy: 600.0 - cx: 599.5 - cy: 339.5 - png_depth_scale: 6553.5 #for depth image in png format - crop_edge: 0 \ No newline at end of file + iters: 60 \ No newline at end of file diff --git a/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad.yaml b/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad.yaml index 6b4c613..5fdbb3d 100644 --- a/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad.yaml +++ b/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad.yaml @@ -21,13 +21,4 @@ mapping: mapping_window_size: 5 pixels: 1000 iters_first: 1500 - iters: 60 -camera_params: - image_height: 680 - image_width: 1200 - fx: 600.0 - fy: 600.0 - cx: 599.5 - cy: 339.5 - png_depth_scale: 6553.5 #for depth image in png format - crop_edge: 0 \ No newline at end of file + iters: 60 \ No newline at end of file diff --git a/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad_semantic.yaml b/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad_semantic.yaml index ce2a3f7..5fdbb3d 100644 --- a/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad_semantic.yaml +++ b/concept-graphs/conceptgraph/dataset/dataconfigs/replica/replica_cad_semantic.yaml @@ -1,4 +1,4 @@ -dataset_name: 'semantic_replica' +dataset_name: 'replica_cad' meshing: eval_rec: True tracking: @@ -21,13 +21,4 @@ mapping: mapping_window_size: 5 pixels: 1000 iters_first: 1500 - iters: 60 -camera_params: - image_height: 480 - image_width: 640 - fx: 320.0 - fy: 320.0 - cx: 319.5 - cy: 239.5 - png_depth_scale: 1000 # for depth image in png format - crop_edge: 0 \ No newline at end of file + iters: 60 \ No newline at end of file From cbab26cafd4ee5f1a1830344ce0adec071758ef7 Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Wed, 11 Sep 2024 13:29:49 +0300 Subject: [PATCH 10/24] adding adaptors --- adaptors/eval_semseg.py | 417 ++++++++++++++++++++++++++++++++++++++++ adaptors/run_slam.py | 299 ++++++++++++++++++++++++++++ 2 files changed, 716 insertions(+) create mode 100644 adaptors/eval_semseg.py create mode 100644 adaptors/run_slam.py diff --git a/adaptors/eval_semseg.py b/adaptors/eval_semseg.py new file mode 100644 index 0000000..05d5b41 --- /dev/null +++ b/adaptors/eval_semseg.py @@ -0,0 +1,417 @@ +import gzip +import os +import glob +from pathlib import Path +import argparse +import pickle +import json +from tqdm import tqdm + +import numpy as np +import open3d as o3d +import pandas as pd + +import torch + +import open_clip + +# from chamferdist.chamfer import knn_points +from pytorch3d.ops.knn import knn_points +from gradslam.structures.pointclouds import Pointclouds + +# from conceptgraph.dataset.replica_constants import ( +# # REPLICA_EXISTING_CLASSES, +# # REPLICA_CLASSES, +# # REPLICA_SCENE_IDS, +# # REPLICA_SCENE_IDS_, +# ) +from conceptgraph.slam.slam_classes import MapObjectList +from conceptgraph.utils.vis import get_random_colors +from conceptgraph.utils.eval import compute_confmatrix, compute_pred_gt_associations, compute_metrics + + +def get_parser(): + parser = argparse.ArgumentParser() + parser.add_argument( + "--replica_root", type=Path, default=Path("~/rdata/Replica/").expanduser() + ) + parser.add_argument( + "--replica_semantic_root", + type=Path, + 
default=Path("~/rdata/Replica-semantic/").expanduser() + ) + parser.add_argument( + "--pred_exp_name", + type=str, + default="ram_withbg_allclasses_overlap_maskconf0.25_simsum1.2_dbscan.1_masksub", + help="The name of cfslam experiment. Will be used to load the result. " + ) + parser.add_argument( + "--label", + type=str, + default="replica" + ) + parser.add_argument( + "--n_exclude", type=int, default=1, choices=[1, 4, 6], + help='''Number of classes to exclude: + 1: exclude "other" + 4: exclude "other", "floor", "wall", "ceiling" + 6: exclude "other", "floor", "wall", "ceiling", "door", "window" + ''', + ) + parser.add_argument( + "--device", type=str, default="cuda:0" + ) + parser.add_argument( + "--scene_ids_str", type=str + ) + parser.add_argument( + "--semseg_classes_path", type=str, default=None + ) + return parser + + +def eval_replica( + scene_id: str, + # scene_id_: str, + class_names: list[str], + class_feats: torch.Tensor, + args: argparse.Namespace, + # class_all2existing: torch.Tensor, + ignore_index=[], + gt_class_only: bool = True, # only compute the conf matrix for the GT classes +): + class2color = get_random_colors(len(class_names)) + + '''Load the GT point cloud''' + gt_pc_path = os.path.join( + args.replica_semantic_root, scene_id, "rgb_cloud" + # args.replica_semantic_root, scene_id_, "Sequence_1", "saved-maps-gt" + ) + gt_pose_path = os.path.join( + # args.replica_semantic_root, scene_id, "Sequence_1", "traj_w_c.txt" + args.replica_semantic_root, scene_id, "traj.txt" + ) + + gt_map = o3d.io.read_point_cloud(os.path.join(gt_pc_path, "semantic.pcd")) + gt_poses = np.loadtxt(gt_pose_path) + gt_poses = torch.from_numpy(gt_poses.reshape(-1, 4, 4)).float() + + gt_xyz = torch.tensor(np.asarray(gt_map.points)) + # gt_color = gt_map.colors_padded[0] + gt_class_np = (np.asarray(gt_map.colors)[..., 0] * 255).round() + gt_class = torch.tensor(gt_class_np, dtype=torch.int) # (N,) + # gt_class = class_all2existing[gt_class] # (N,) + assert gt_class.min() >= 0 + # assert gt_class.max() < len(REPLICA_EXISTING_CLASSES) + assert gt_class.max() < len(class_names) + + print(gt_class, gt_class.shape, gt_class.dtype, gt_class) + + # # transform pred_xyz and gt_xyz according to the first pose in gt_poses + # gt_xyz = gt_xyz @ gt_poses[0, :3, :3].t() + gt_poses[0, :3, 3] + + # Get the set of classes that are used for evaluation + all_class_index = np.arange(len(class_names)) + ignore_index = np.asarray(ignore_index) + if gt_class_only: + # Only consider the classes that exist in the current scene + existing_index = gt_class.unique().cpu().numpy() + non_existing_index = np.setdiff1d(all_class_index, existing_index) + ignore_index = np.append(ignore_index, non_existing_index) + print( + "Using only the classes that exists in GT of this scene: ", + len(existing_index), + ) + + keep_index = np.setdiff1d(all_class_index, ignore_index) + + print( + f"{len(keep_index)} classes remains. 
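The GT loading above assumes a colour-packed label format: the semantic `.pcd` stores each point's class ID in the red channel as `id / 255`, so the decode is a scale and round. A sketch of just that step:

```python
import numpy as np
import open3d as o3d

gt_map = o3d.io.read_point_cloud("rgb_cloud/semantic.pcd")   # path as in the script
colors = np.asarray(gt_map.colors)                           # floats in [0, 1]
gt_class = (colors[..., 0] * 255).round().astype(np.int64)   # red channel -> class ID
```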
They are: ", + [(i, class_names[i]) for i in keep_index], + ) + + '''Load the predicted point cloud''' + result_paths = glob.glob( + os.path.join( + args.replica_root, scene_id, "pcd_saves", + f"full_pcd_{args.pred_exp_name}*.pkl.gz" + ) + ) + if len(result_paths) == 0: + raise ValueError(f"No result found for {scene_id} with {args.pred_exp_name}") + + # Get the newest result over result_paths + result_paths = sorted(result_paths, key=os.path.getmtime) + result_path = result_paths[-1] + print(f"Loading mapping result from {result_path}") + + with gzip.open(result_path, "rb") as f: + results = pickle.load(f) + + objects = MapObjectList() + objects.load_serializable(results['objects']) + + # Compute the CLIP similarity for the mapped objects and assign class to them + object_feats = objects.get_stacked_values_torch("clip_ft").to(args.device) + object_feats = object_feats / object_feats.norm(dim=-1, keepdim=True) # (num_objects, D) + object_class_sim = object_feats @ class_feats.T # (num_objects, num_classes) + + # suppress the logits to -inf that are not in torch.from_numpy(keep_class_index) + object_class_sim[:, ignore_index] = -1e10 + object_class = object_class_sim.argmax(dim=-1) # (num_objects,) + + if args.n_exclude == 1: + if results['bg_objects'] is None: + print("Warning: no background objects found. This is expected if only SAM is used, but not the detector. ") + else: + # Also add the background objects + bg_objects = MapObjectList() + bg_objects.load_serializable(results['bg_objects']) + + # Assign class to the background objects (hard assignment) + for obj in bg_objects: + cn = obj['class_name'][0] + c = class_names.index(cn.lower()) + object_class = torch.cat([object_class, object_class.new_full([1], c)]) + + objects += bg_objects + + pred_xyz = [] + pred_color = [] + pred_class = [] + for i in range(len(objects)): + obj_pcd = objects[i]['pcd'] + pred_xyz.append(np.asarray(obj_pcd.points)) + pred_color.append(np.asarray(obj_pcd.colors)) + pred_class.append(np.ones(len(obj_pcd.points)) * object_class[i].item()) + + pred_xyz = torch.from_numpy(np.concatenate(pred_xyz, axis=0)) + pred_color = torch.from_numpy(np.concatenate(pred_color, axis=0)) + pred_class = torch.from_numpy(np.concatenate(pred_class, axis=0)).long() + + '''Load the SLAM reconstruction results, to ensure fair comparison''' + slam_path = os.path.join( + args.replica_root, scene_id, "rgb_cloud" + ) + # slam_pointclouds = Pointclouds.load_pointcloud_from_h5(slam_path) + # slam_xyz = slam_pointclouds.points_padded[0] + + slam_pointclouds = o3d.io.read_point_cloud(os.path.join(slam_path, "pointcloud.pcd")) + slam_xyz = torch.tensor(np.asarray(slam_pointclouds.points)) + + # To ensure fair comparison, build the prediction point cloud based on the slam results + # Search for NN of slam_xyz in pred_xyz + slam_nn_in_pred = knn_points( + slam_xyz.unsqueeze(0).cuda().contiguous().float(), + pred_xyz.unsqueeze(0).cuda().contiguous().float(), + lengths1=None, + lengths2=None, + return_nn=True, + return_sorted=True, + K=1, + ) + idx_slam_to_pred = slam_nn_in_pred.idx.squeeze(0).squeeze(-1) + + # # predicted point cloud in open3d + # print("Before resampling") + # pred_pcd = o3d.geometry.PointCloud() + # pred_pcd.points = o3d.utility.Vector3dVector(pred_xyz.numpy()) + # pred_pcd.colors = o3d.utility.Vector3dVector(class2color[pred_class.numpy()]) + # o3d.visualization.draw_geometries([pred_pcd]) + + # Resample the pred_xyz and pred_class based on slam_nn_in_pred + pred_xyz = slam_xyz + # pred_xyz = pred_xyz @ gt_poses[0, :3, 
:3].t() + gt_poses[0, :3, 3] + pred_class = pred_class[idx_slam_to_pred.cpu()] + pred_color = pred_color[idx_slam_to_pred.cpu()] + + # # predicted point cloud in open3d + # print("After resampling") + # pred_pcd = o3d.geometry.PointCloud() + # pred_pcd.points = o3d.utility.Vector3dVector(pred_xyz.numpy()) + # pred_pcd.colors = o3d.utility.Vector3dVector(class2color[pred_class.numpy()]) + # o3d.visualization.draw_geometries([pred_pcd]) + + # # GT point cloud in open3d + # print("GT pointcloud") + # gt_pcd = o3d.geometry.PointCloud() + # gt_pcd.points = o3d.utility.Vector3dVector(gt_xyz.numpy()) + # gt_pcd.colors = o3d.utility.Vector3dVector(class2color[gt_class.numpy()]) + # o3d.visualization.draw_geometries([gt_pcd]) + + # print("Merged pointcloud") + # o3d.visualization.draw_geometries([gt_pcd, pred_pcd]) + + # Compute the associations between the predicted and ground truth point clouds + idx_pred_to_gt, idx_gt_to_pred = compute_pred_gt_associations( + pred_xyz.unsqueeze(0).cuda().contiguous().float(), + gt_xyz.unsqueeze(0).cuda().contiguous().float(), + ) + + # Only keep the points on the 3D reconstructions that are mapped to + # GT point that is in keep_index + label_gt = gt_class[idx_pred_to_gt.cpu()] + pred_keep_idx = torch.isin(label_gt, torch.from_numpy(keep_index)) + pred_class = pred_class[pred_keep_idx] + idx_pred_to_gt = idx_pred_to_gt[pred_keep_idx] + idx_gt_to_pred = None # not to be used + + # Compute the confusion matrix + confmatrix = compute_confmatrix( + pred_class.cuda(), + gt_class.cuda(), + idx_pred_to_gt, + idx_gt_to_pred, + class_names, + ) + + assert confmatrix.sum(0)[ignore_index].sum() == 0 + assert confmatrix.sum(1)[ignore_index].sum() == 0 + + # '''Visualization for debugging''' + # print('GT point cloud in open3d') + # class2color = get_random_colors(len(class_names)) + + # # GT point cloud in open3d + # gt_pcd = gt_map.open3d(0) + # gt_pcd.transform(gt_poses[0].numpy()) + # gt_pcd.colors = o3d.utility.Vector3dVector(class2color[gt_class]) + + # # predicted point cloud in open3d + # pred_pcd = o3d.geometry.PointCloud() + # pred_pcd.points = o3d.utility.Vector3dVector(pred_xyz.numpy()) + # pred_pcd.colors = o3d.utility.Vector3dVector(class2color[pred_class.numpy()]) + + # o3d.visualization.draw_geometries([pred_pcd]) + # o3d.visualization.draw_geometries([gt_pcd]) + + return confmatrix, keep_index + + +def main(args: argparse.Namespace): + + # map REPLICA_CLASSES to REPLICA_EXISTING_CLASSES + # class_all2existing = torch.ones(len(REPLICA_CLASSES)).long() * -1 + # for i, c in enumerate(REPLICA_EXISTING_CLASSES): + # class_all2existing[c] = i + # class_names = [REPLICA_CLASSES[i] for i in REPLICA_EXISTING_CLASSES] + # class_names = REPLICA_CLASSES + + if args.semseg_classes_path is not None: + semseg_classes = json.load(open( + args.semseg_classes_path, "r" + ))['classes'] + else: + semseg_classes = json.load(open( + args.replica_semantic_root / "embed_semseg_classes.json", "r" + ))['classes'] + + class_ids = [class_param['id'] for class_param in semseg_classes] + assert set(class_ids) == set([i for i in range(len(semseg_classes))]), np.unique(class_ids) + id_name_dict = {class_param['id']: class_param['name'] for class_param in semseg_classes} + class_names = [id_name_dict[i] for i in range(len(semseg_classes))] + + if args.n_exclude == 1: + exclude_class = [class_names.index(c) for c in [ + "other" + ]] + elif args.n_exclude == 4: + exclude_class = [class_names.index(c) for c in [ + "other", "floor", "wall", "ceiling" + ]] + elif args.n_exclude == 6: + 
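To picture what `compute_confmatrix` receives here: each predicted point carries a predicted class and, through `idx_pred_to_gt`, the class of its nearest GT point, so a dense confusion matrix falls out of one `bincount`. An illustrative stand-in, not the project's actual implementation:

```python
import torch

def confmatrix_from_assoc(pred_class, gt_class, idx_pred_to_gt, num_classes):
    """Rows: GT class of the matched point. Columns: predicted class."""
    gt_matched = gt_class[idx_pred_to_gt]           # (N,) GT label per pred point
    flat = gt_matched * num_classes + pred_class    # encode (row, col) pairs
    conf = torch.bincount(flat, minlength=num_classes ** 2)
    return conf.view(num_classes, num_classes)

# Per-class IoU then follows as diag / (row_sum + col_sum - diag).
```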
exclude_class = [class_names.index(c) for c in [ + "other", "floor", "wall", "ceiling", "door", "window" + ]] + else: + raise ValueError("Invalid n_exclude: %d" % args.n_exclude) + + print("Excluding classes: ", [(i, class_names[i]) for i in exclude_class]) + + # Compute the CLIP embedding for each class + clip_model, _, clip_preprocess = open_clip.create_model_and_transforms("ViT-H-14", "laion2b_s32b_b79k") + clip_model = clip_model.to(args.device) + clip_tokenizer = open_clip.get_tokenizer("ViT-H-14") + prompts = [f"an image of {c}" for c in class_names] + text = clip_tokenizer(prompts) + text = text.to(args.device) + batch_size = 64 + class_feats = [] + for i in range(int(np.ceil(len(text)/ batch_size))): + with torch.no_grad(): + class_feats.append(clip_model.encode_text(text[i*batch_size:(i+1)*batch_size])) + + class_feats = torch.cat(class_feats, dim=0) + class_feats /= class_feats.norm(dim=-1, keepdim=True) # (num_classes, D) + + # scene_ids = REPLICA_SCENE_IDS + scene_ids = list(args.scene_ids_str.split()) + + conf_matrices = {} + conf_matrix_all = 0 + for scene_id in scene_ids: + print("Evaluating on:", scene_id) + conf_matrix, keep_index = eval_replica( + scene_id = scene_id, + # scene_id_ = scene_id_, + class_names = class_names, + class_feats = class_feats, + args = args, + # class_all2existing = class_all2existing, + ignore_index = exclude_class, + ) + + conf_matrix = conf_matrix.detach().cpu() + conf_matrix_all += conf_matrix + + conf_matrices[scene_id] = { + "conf_matrix": conf_matrix, + "keep_index": keep_index, + } + + # Remove the rows and columns that are not in keep_class_index + conf_matrices["all"] = { + "conf_matrix": conf_matrix_all, + "keep_index": conf_matrix_all.sum(axis=1).nonzero().reshape(-1), # Looks like a mistake + # "keep_index": conf_matrix_all.sum(axis=0).nonzero().reshape(-1) + } + + results = [] + for scene_id, res in conf_matrices.items(): + conf_matrix = res["conf_matrix"] + keep_index = res["keep_index"] + conf_matrix = conf_matrix[keep_index, :][:, keep_index] + keep_class_names = [class_names[i] for i in keep_index] + + mdict = compute_metrics(conf_matrix, keep_class_names) + results.append( + { + "scene_id": scene_id, + "miou": mdict["miou"] * 100.0, + "mrecall": np.mean(mdict["recall"]) * 100.0, + "mprecision": np.mean(mdict["precision"]) * 100.0, + "mf1score": np.mean(mdict["f1score"]) * 100.0, + "fmiou": mdict["fmiou"] * 100.0, + } + ) + + df_result = pd.DataFrame(results) + + save_path = "./results/%s/%s_ex%d_results.csv" % ( + args.pred_exp_name, args.label, args.n_exclude + ) + os.makedirs(os.path.dirname(save_path), exist_ok=True) + df_result.to_csv(save_path, index=False) + + # Also save the conf_matrices + save_path = "./results/%s/%s_ex%d_conf_matrices.pkl" % ( + args.pred_exp_name, args.label, args.n_exclude + ) + pickle.dump(conf_matrices, open(save_path, "wb")) + + +if __name__ == '__main__': + parser = get_parser() + args = parser.parse_args() + main(args) \ No newline at end of file diff --git a/adaptors/run_slam.py b/adaptors/run_slam.py new file mode 100644 index 0000000..633738b --- /dev/null +++ b/adaptors/run_slam.py @@ -0,0 +1,299 @@ +import argparse +import json +import yaml +import os +import glob +from pathlib import Path +import numpy as np +import torch +import torch.nn.functional as F +from tqdm import tqdm +from dataclasses import dataclass + +import open3d as o3d + +from conceptgraph.dataset.datasets_common import get_dataset + +from gradslam.slam.pointfusion import PointFusion +from gradslam.structures.pointclouds 
import Pointclouds +from gradslam.structures.rgbdimages import RGBDImages + +from typing import Dict, List, Optional, Union + + +@dataclass +class Intrinsic: + """Camera intrinsics""" + + def __init__(self, width, height, fx, fy, cx, cy, depth_scale): + self.width = width + self.height = height + self.fx = fx + self.fy = fy + self.cx = cx + self.cy = cy + self.depth_scale = depth_scale + + def __repr__(self): + return f"Intrinsic(\ + width={self.width}, \ + height={self.height}, \ + fx={self.fx}, \ + fy={self.fy}, \ + cx={self.cx}, \ + cy={self.cy}, \ + depth_scale={self.depth_scale}, \ + )" + + +class MappingDataset(torch.utils.data.Dataset): + def __init__( + self, + data_path, + stride: Optional[int] = 1, + start: Optional[int] = 0, + end: Optional[int] = -1, + load_semantics: bool = False, + ): + self._data_path = data_path + self._slice = slice(start, end, stride) + self._load_semantics = load_semantics + + self._rgb_paths = sorted(glob.glob(os.path.join(self._data_path, 'results/frame*.jpg'))) + self._depth_paths = sorted(glob.glob(os.path.join(self._data_path, 'results/depth*.png'))) + + assert len(self._rgb_paths) == len(self._depth_paths) + + if self._load_semantics: + self._semantic_paths = sorted(glob.glob(os.path.join(self._data_path, 'results/semantic*.png'))) + + assert len(self._rgb_paths) == len(self._semantic_paths) + + self._poses = self._load_poses() + + assert len(self._poses) == len(self._rgb_paths) + + self._intrinsics = self._load_intrinsics() + + + def __len__(self): + return len(self._rgb_paths[self._slice]) + + + def __getitem__(self, index): + rgb = o3d.io.read_image(self._rgb_paths[self._slice][index]) + depth = o3d.io.read_image(self._depth_paths[self._slice][index]) + + if self._load_semantics: + semantics = o3d.io.read_image(self._semantic_paths[self._slice][index]) + else: + semantics = None + + pose = self._poses[self._slice][index] + intrinsics = self._intrinsics + + return rgb, depth, semantics, pose, intrinsics + + + def _load_poses(self): + with open(os.path.join(self._data_path, "traj.txt"), "r") as file: + poses = [] + for line in file: + pose = np.fromstring(line, dtype=float, sep=" ") + pose = np.reshape(pose, (4, 4)) + poses.append(pose) + + return poses + + + def _load_intrinsics(self): + yaml_file = os.path.join(self._data_path, "camera_params.yaml") + + with open(yaml_file, "r") as file: + data = yaml.safe_load(file) + + camera_params = data["camera_params"] + + intrinsic = Intrinsic( + width = camera_params["image_width"], + height = camera_params["image_height"], + fx = camera_params["fx"], + fy = camera_params["fy"], + cx = camera_params["cx"], + cy = camera_params["cy"], + depth_scale = camera_params["png_depth_scale"], + ) + + return intrinsic + + +def get_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser() + parser.add_argument( + "--dataset_root", type=Path, required=True, + ) + parser.add_argument( + "--scene_id", type=str, required=True + ) + parser.add_argument( + "--dataset_config", type=str, required=True, + help="This path may need to be changed depending on where you run this script. 
" + ) + # parser.add_argument("--image_height", type=int, default=480) + # parser.add_argument("--image_width", type=int, default=640) + + parser.add_argument("--start", type=int, default=0) + parser.add_argument("--end", type=int, default=-1) + parser.add_argument("--stride", type=int, default=1) + + parser.add_argument("--downsample_rate", type=int, default=1) + + parser.add_argument("--visualize", action="store_true") + parser.add_argument("--save_pcd", action="store_true", default=True) + parser.add_argument("--save_ply", action="store_true") + parser.add_argument("--save_h5", action="store_true") + + parser.add_argument("--seed", type=int, default=42) + parser.add_argument("--device", type=str, default="cuda") + + parser.add_argument("--load_semseg", action="store_true", + help="Load GT semantic segmentation and run fusion on them.") + + return parser + + +def create_semantic_point_cloud( + rgb, depth, intrinsic, pose, semantics=None +): + # Create RGBD image + rgbd = o3d.geometry.RGBDImage.create_from_color_and_depth( + o3d.geometry.Image(rgb), + o3d.geometry.Image(depth), + depth_scale=intrinsic.depth_scale, + depth_trunc=np.inf, + convert_rgb_to_intensity=False, + ) + + # Create point cloud + color_pcd = o3d.geometry.PointCloud.create_from_rgbd_image( + rgbd, + o3d.camera.PinholeCameraIntrinsic( + width=intrinsic.width, + height=intrinsic.height, + fx=intrinsic.fx, + fy=intrinsic.fy, + cx=intrinsic.cx, + cy=intrinsic.cy, + ), + ) + + color_pcd.transform(pose) + + if semantics is not None: + # Create semantic image + semantic_color = np.repeat(np.asarray(semantics, dtype=np.uint8)[..., None], 3, axis=-1) + + semantic_d = o3d.geometry.RGBDImage.create_from_color_and_depth( + o3d.geometry.Image(semantic_color), + o3d.geometry.Image(depth), + depth_scale=intrinsic.depth_scale, + depth_trunc=np.inf, + convert_rgb_to_intensity=False, + ) + + # Create semantic point cloud + semantic_pcd = o3d.geometry.PointCloud.create_from_rgbd_image( + semantic_d, + o3d.camera.PinholeCameraIntrinsic( + width = intrinsic.width, + height = intrinsic.height, + fx = intrinsic.fx, + fy = intrinsic.fy, + cx = intrinsic.cx, + cy = intrinsic.cy, + ), + ) + + semantic_pcd.transform(pose) + else: + semantic_pcd = None + + return color_pcd, semantic_pcd + + +def main(args: argparse.Namespace): + dataset = MappingDataset( + data_path = os.path.join(args.dataset_root, args.scene_id), + stride = args.stride, + start = args.start, + end = args.end, + load_semantics = args.load_semseg + ) + + color_map = o3d.geometry.PointCloud() + semantic_map = o3d.geometry.PointCloud() if args.load_semseg else None + + for (rgb, depth, semantics, pose, intrinsics) in tqdm(dataset): + color_pcd, semantic_pcd = create_semantic_point_cloud( + rgb, depth, intrinsics, pose, semantics + ) + + color_map += color_pcd + + if semantic_map is not None: + semantic_map += semantic_pcd + + color_map = color_map.uniform_down_sample(every_k_points=args.downsample_rate) + + if semantic_map is not None: + semantic_map = semantic_map.uniform_down_sample(every_k_points=args.downsample_rate) + + if args.visualize: + o3d.visualization.draw_geometries([color_map]) + + if semantic_map is not None: + o3d.visualization.draw_geometries([semantic_map]) + + dir_to_save_map = os.path.join(args.dataset_root, args.scene_id, "rgb_cloud") + + if args.save_pcd or args.save_ply: + try: + os.makedirs(dir_to_save_map, exist_ok=False) + except Exception as _: + pass + + if args.save_pcd: + print(f'Saving .pcd files to "{dir_to_save_map}"') + + o3d.io.write_point_cloud( + 
os.path.join(dir_to_save_map, "pointcloud.pcd"), + color_map + ) + + if semantic_map is not None: + o3d.io.write_point_cloud( + os.path.join(dir_to_save_map, "semantic.pcd"), + semantic_map + ) + + if args.save_ply: + print(f'Saving .ply files to "{dir_to_save_map}"') + + o3d.io.write_point_cloud( + os.path.join(dir_to_save_map, "pointcloud.ply"), + color_map + ) + + if semantic_map is not None: + o3d.io.write_point_cloud( + os.path.join(dir_to_save_map, "semantic.ply"), + semantic_map + ) + + +if __name__ == "__main__": + args = get_parser().parse_args() + torch.manual_seed(args.seed) + np.random.seed(args.seed) + + main(args) \ No newline at end of file From 880a056bd5cb2da12fb16ff1f13bc7924cde71a0 Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Wed, 11 Sep 2024 13:30:41 +0300 Subject: [PATCH 11/24] gitignore update --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index a6fb039..610f1df 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,5 @@ Readme_files/* concept-graphs/yolov8l-world.pt concept-graphs/outputs/* assets.zip -.venv \ No newline at end of file +.venv +.ipynb_checkpoints \ No newline at end of file From d427a84e3b3fa8d5b339fb9b9c1cbd20a7e7ec57 Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Wed, 11 Sep 2024 13:35:26 +0300 Subject: [PATCH 12/24] export scripts updating --- .../export_concept_graphs_replica_cad_none.sh | 292 ++++++++++++------ export/export_concept_graphs_replica_none.sh | 191 ++++++------ 2 files changed, 301 insertions(+), 182 deletions(-) diff --git a/export/export_concept_graphs_replica_cad_none.sh b/export/export_concept_graphs_replica_cad_none.sh index 19af57a..371e722 100644 --- a/export/export_concept_graphs_replica_cad_none.sh +++ b/export/export_concept_graphs_replica_cad_none.sh @@ -8,114 +8,218 @@ export OPENAI_API_KEY="" export DATASET_ROOT=/data/Datasets/generated/replica_cad # export SEMANTIC_DATASET_ROOT=/data/Datasets/replica_semantic_gt -export SEMANTIC_DATASET_ROOT=/data/Datasets/generated/replica_cad_semantic - -export CG_FOLDER=/opt/src/conceptgraph/ -export DATASET_CONFIG_PATH=${CG_FOLDER}/dataset/dataconfigs/replica/replica.yaml -export SEMANTIC_DATASET_CONFIG_PATH=${CG_FOLDER}/dataset/dataconfigs/replica/replica_cad.yaml - -# export SCENE_NAMES=("v3_sc3_staging_03/test") -export SCENE_NAMES=("v3_sc0_staging_00/default_lights_0") -# export SCENE_NAMES=(office2) # room2 +# export SEMANTIC_DATASET_ROOT=/data/Datasets/Replica_nerf_full + +export CG_FOLDER=/opt/src/conceptgraph +export DATASET_CONFIG_PATH=${CG_FOLDER}/dataset/dataconfigs/replica/replica_cad.yaml +export SEMANTIC_DATASET_CONFIG_PATH=${CG_FOLDER}/dataset/dataconfigs/replica/replica_cad_semantic.yaml + +# export SCENE_NAMES=(office0 office1 office2 office3 office4 room0 room1) # room2 +export SCENE_NAMES=( +v3_sc0_staging_00 +v3_sc0_staging_12 +v3_sc0_staging_16 +v3_sc0_staging_19 +v3_sc0_staging_20 +v3_sc1_staging_00 +v3_sc1_staging_06 +v3_sc1_staging_12 +v3_sc1_staging_19 +v3_sc1_staging_20 +v3_sc2_staging_00 +v3_sc2_staging_11 +v3_sc2_staging_13 +v3_sc2_staging_19 +v3_sc2_staging_20 +v3_sc3_staging_03 +v3_sc3_staging_04 +v3_sc3_staging_08 +v3_sc3_staging_15 +v3_sc3_staging_20 +) +# v3_sc0_staging_00 +# v3_sc0_staging_12 +# v3_sc0_staging_16 +# v3_sc0_staging_19 +# v3_sc0_staging_20 +# v3_sc1_staging_00 +# v3_sc1_staging_06 +# v3_sc1_staging_12 +# v3_sc1_staging_19 +# v3_sc1_staging_20 +# v3_sc2_staging_00 +# v3_sc2_staging_11 +# v3_sc2_staging_13 +# v3_sc2_staging_19 +# v3_sc2_staging_20 +# v3_sc3_staging_03 +# 
v3_sc3_staging_04 +# v3_sc3_staging_08 +# v3_sc3_staging_15 +# v3_sc3_staging_20 +# ) +export SCENE_LABELS=(baseline camera_lights dynamic_lights no_lights velocity) # baseline camera_light dynamic_lights FOV no_lights resolution # export SCENE_NAME=room1 export CLASS_SET=none export THRESHOLD=1.2 +mkdir -p "/tmp/config" for SCENE_NAME in ${SCENE_NAMES[*]} do - printf "Running scene: %s\n" $SCENE_NAME - - # ###### - # # ConceptGraphs uses SAM in the "segment all" mode and extract class-agnostic masks. - # ###### - # python ${CG_FOLDER}/scripts/generate_gsa_results.py \ - # --dataset_root ${DATASET_ROOT} \ - # --dataset_config ${DATASET_CONFIG_PATH} \ - # --scene_id ${SCENE_NAME} \ - # --class_set ${CLASS_SET} \ - # --stride 5 - - - # # ###### - # # The following command builds an object-based 3D map of the scene, using the image segmentation results from above. - # ###### - # python ${CG_FOLDER}/slam/cfslam_pipeline_batch.py \ - # dataset_root=${DATASET_ROOT} \ - # dataset_config=${DATASET_CONFIG_PATH} \ - # stride=5 \ - # scene_id=${SCENE_NAME} \ - # spatial_sim_type=overlap \ - # mask_conf_threshold=0.95 \ - # match_method=sim_sum \ - # sim_threshold=${THRESHOLD} \ - # dbscan_eps=0.1 \ - # gsa_variant=none \ - # class_agnostic=True \ - # skip_bg=True \ - # max_bbox_area_ratio=0.5 \ - # save_suffix=overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub \ - # merge_interval=20 \ - # merge_visual_sim_thresh=0.8 \ - # merge_text_sim_thresh=0.8 \ - # save_objects_all_frames=True - - - # ###### - # # If save_objects_all_frames=True was used to save the mapping results at every frame, which can be used for animated visualization by: - # ###### - # python ${CG_FOLDER}/scripts/animate_mapping_interactive.py \ - # --input_folder ${DATASET_ROOT}/${SCENE_NAME}/objects_all_frames/none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub - - - # ###### - # # If save_objects_all_frames=True was used to save the mapping results at every frame, which can be used for animated visualization by: - # ###### - # python ${CG_FOLDER}/scripts/animate_mapping_save.py \ - # --input_folder ${DATASET_ROOT}/${SCENE_NAME}/objects_all_frames/none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub - - - # ###### - # # Visualize the object-based mapping results. You can use keys b, c, r, f, i. - # ###### - # python ${CG_FOLDER}/scripts/visualize_cfslam_results.py \ - # --result_path ${DATASET_ROOT}/${SCENE_NAME}/pcd_saves/full_pcd_none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub.pkl.gz - - - # ###### - # # The following command runs a 3D RGB reconstruction (GradSLAM) of a scene and also visualize (--visualize) it. - # # This is useful for sanity check and evaluation. - # ###### - # python ${CG_FOLDER}/scripts/run_slam_rgb.py \ - # --dataset_root ${DATASET_ROOT} \ - # --dataset_config ${DATASET_CONFIG_PATH} \ - # --scene_id ${SCENE_NAME} \ - # --image_height 680 \ - # --image_width 1200 \ - # --stride 5 - + for SCENE_LABEL in ${SCENE_LABELS[*]} + do + printf "Running scene: %s\n" "$SCENE_NAME/${SCENE_LABEL}" + + cp ${DATASET_CONFIG_PATH} "/tmp/config/data_config.yaml" + echo "" >> "/tmp/config/data_config.yaml" + cat "${DATASET_ROOT}/${SCENE_LABEL}/${SCENE_NAME}/camera_params.yaml" >> "/tmp/config/data_config.yaml" + + # ###### + # # ConceptGraphs uses SAM in the "segment all" mode and extract class-agnostic masks. 
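+    # # NB (added note, not from the original patch): with CLASS_SET=none the
+    # # detector is skipped and only class-agnostic SAM masks plus their CLIP
+    # # features are saved per frame, so no background objects get tagged; the
+    # # evaluation's n_exclude=1 branch then prints the expected
+    # # "no background objects found" warning.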
+ # ###### + # python ${CG_FOLDER}/scripts/generate_gsa_results.py \ + # --dataset_root ${DATASET_ROOT} \ + # --dataset_config "/tmp/config/data_config.yaml" \ + # --scene_id "${SCENE_LABEL}/${SCENE_NAME}/" \ + # --class_set ${CLASS_SET} \ + # --stride 5 + + + # ###### + # # The following command builds an object-based 3D map of the scene, using the image segmentation results from above. + # ###### + # python ${CG_FOLDER}/slam/cfslam_pipeline_batch.py \ + # dataset_root=${DATASET_ROOT} \ + # dataset_config="/tmp/config/data_config.yaml" \ + # stride=5 \ + # scene_id="${SCENE_LABEL}/${SCENE_NAME}/" \ + # spatial_sim_type=overlap \ + # mask_conf_threshold=0.95 \ + # match_method=sim_sum \ + # sim_threshold=${THRESHOLD} \ + # dbscan_eps=0.1 \ + # gsa_variant=none \ + # class_agnostic=True \ + # skip_bg=True \ + # max_bbox_area_ratio=0.5 \ + # save_suffix=overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub \ + # merge_interval=20 \ + # merge_visual_sim_thresh=0.8 \ + # merge_text_sim_thresh=0.8 \ + # save_objects_all_frames=True + + + # ###### + # # If save_objects_all_frames=True was used to save the mapping results at every frame, which can be used for animated visualization by: + # ###### + # python ${CG_FOLDER}/scripts/animate_mapping_interactive.py \ + # --input_folder ${DATASET_ROOT}/${SCENE_LABEL}/${SCENE_NAME}/objects_all_frames/none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub + + + # ###### + # # If save_objects_all_frames=True was used to save the mapping results at every frame, which can be used for animated visualization by: + # ###### + # python ${CG_FOLDER}/scripts/animate_mapping_save.py \ + # --input_folder ${DATASET_ROOT}/${SCENE_LABEL}/${SCENE_NAME}/objects_all_frames/none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub + + + # ###### + # # Visualize the object-based mapping results. You can use keys b, c, r, f, i. + # ###### + # python ${CG_FOLDER}/scripts/visualize_cfslam_results.py \ + # --result_path ${DATASET_ROOT}/${SCENE_LABEL}/${SCENE_NAME}/pcd_saves/full_pcd_none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub.pkl.gz + + + # ###### + # # The following command runs a 3D RGB reconstruction (GradSLAM) of a scene and also visualize (--visualize) it. + # # This is useful for sanity check and evaluation. + # ###### + # python ${CG_FOLDER}/scripts/run_slam_rgb.py \ + # --dataset_root ${DATASET_ROOT} \ + # --dataset_config "/tmp/config/data_config.yaml" \ + # --scene_id "${SCENE_LABEL}/${SCENE_NAME}" \ + # --image_height 480 \ + # --image_width 640 \ + # --stride 5 + + python /adaptors/run_slam.py \ + --dataset_root "${DATASET_ROOT}" \ + --dataset_config "/tmp/config/data_config.yaml" \ + --scene_id "${SCENE_LABEL}/${SCENE_NAME}" \ + --stride 5 \ + --downsample_rate 10 + + done # ##### # The following command runs a 3D RGB reconstruction (GradSLAM) of a scene with semantic labels and also visualize (--visualize) it. # This is useful for evaluation. 
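    # NB: /adaptors/run_slam.py writes rgb_cloud/pointcloud.pcd (and, with
    # --load_semseg, rgb_cloud/semantic.pcd) for each scene; the semseg
    # evaluation later reads rgb_cloud/pointcloud.pcd to resample predictions
    # onto the reconstructed cloud, so this step must complete first.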
# ##### - python ${CG_FOLDER}/scripts/run_slam_rgb.py \ - --dataset_root ${SEMANTIC_DATASET_ROOT} \ - --dataset_config ${SEMANTIC_DATASET_CONFIG_PATH} \ + # cp ${SEMANTIC_DATASET_CONFIG_PATH} "/tmp/config/data_semantic_config.yaml" + # echo "" >> "/tmp/config/data_semantic_config.yaml" + # cat "${DATASET_ROOT}/test/${SCENE_NAME}/camera_params.yaml" >> "/tmp/config/data_semantic_config.yaml" + + # python ${CG_FOLDER}/scripts/run_slam_rgb.py \ + # --dataset_root "${DATASET_ROOT}/test/" \ + # --dataset_config "/tmp/config/data_semantic_config.yaml" \ + # --scene_id ${SCENE_NAME} \ + # --image_height 480 \ + # --image_width 640 \ + # --visualize \ + # --stride 10 \ + # --load_semseg + + python /adaptors/run_slam.py \ + --dataset_root "${DATASET_ROOT}/baseline/" \ + --dataset_config "/tmp/config/data_semantic_config.yaml" \ --scene_id ${SCENE_NAME} \ - --image_height 480 \ - --image_width 640 \ - --stride 15 \ + --stride 5 \ + --downsample_rate 10 \ --load_semseg done -# ###### -# # Then run the following command to evaluate the semantic segmentation results. -# ###### -# python ${CG_FOLDER}/scripts/eval_replica_semseg.py \ -# --replica_root ${DATASET_ROOT} \ -# --replica_semantic_root ${SEMANTIC_DATASET_ROOT} \ -# --n_exclude 6 \ -# --pred_exp_name none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub # \ -# # --device "cpu" \ No newline at end of file +for SCENE_LABEL in ${SCENE_LABELS[*]} +do + printf "Evaluating label: %s\n" "${SCENE_LABEL}" + ###### + # Then run the following commands to evaluate the semantic segmentation results. + ###### + export SCENE_NAMES_STR=$(IFS=' '; echo "${SCENE_NAME[*]}") + + # echo $SCENE_NAMES_STR + + # SCENE_NAMES_STR="" + # # Iterate over each label in SCENE_LABELS + # for label in "${SCENE_LABELS[@]}"; do + # SCENE_NAMES_STR+="$SCENE_NAME/$label " + # done + + # Trim the trailing space + # export SCENE_NAMES_STR=$(echo "$SCENE_NAMES_STR" | sed 's/ $//') + + # echo $SCENE_NAMES_STR + + # python ${CG_FOLDER}/scripts/eval_replica_semseg.py \ + # --replica_root "${DATASET_ROOT}/${SCENE_LABEL}" \ + # --replica_semantic_root "${DATASET_ROOT}/gt" \ + # --n_exclude 6 \ + # --label "${SCENE_LABEL}" \ + # --scene_ids_str "${SCENE_NAMES_STR}" \ + # --semseg_classes "${DATASET_ROOT}/embed_semseg_classes.json" \ + # --pred_exp_name "none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub" # \ + # # --device "cpu" + + + # python /adaptors/eval_semseg.py \ + # --replica_root "${DATASET_ROOT}/${SCENE_LABEL}" \ + # --replica_semantic_root "${DATASET_ROOT}/baseline" \ + # --n_exclude 6 \ + # --label "${SCENE_LABEL}" \ + # --scene_ids_str "${SCENE_NAMES_STR}" \ + # --pred_exp_name "none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub" # \ + # # --semseg_classes "${DATASET_ROOT}/baseline/embed_semseg_classes.json" \ + # # --device "cpu" + +done \ No newline at end of file diff --git a/export/export_concept_graphs_replica_none.sh b/export/export_concept_graphs_replica_none.sh index 065c9ef..df83f29 100644 --- a/export/export_concept_graphs_replica_none.sh +++ b/export/export_concept_graphs_replica_none.sh @@ -8,109 +8,124 @@ export OPENAI_API_KEY="" export DATASET_ROOT=/data/Datasets/Replica # export SEMANTIC_DATASET_ROOT=/data/Datasets/replica_semantic_gt -export SEMANTIC_DATASET_ROOT=/data/Datasets/Replica_nerf +export SEMANTIC_DATASET_ROOT=/data/Datasets/Replica_nerf_full export CG_FOLDER=/opt/src/conceptgraph/ export DATASET_CONFIG_PATH=${CG_FOLDER}/dataset/dataconfigs/replica/replica.yaml export 
SEMANTIC_DATASET_CONFIG_PATH=${CG_FOLDER}/dataset/dataconfigs/replica/replica_semantic.yaml -export SCENE_NAMES=room1 -export SCENE_NAME=room1 +# export SCENE_NAMES=(office0 office1 office2 office3 office4 room0 room1) # room2 +export SCENE_NAMES=(room0) # room2 +# export SCENE_NAME=room1 export CLASS_SET=none export THRESHOLD=1.2 +mkdir -p "/tmp/config" + +for SCENE_NAME in ${SCENE_NAMES[*]} +do + printf "Running scene: %s\n" $SCENE_NAME + + cp ${DATASET_CONFIG_PATH} "/tmp/config/data_config.yaml" + echo "" >> "/tmp/config/data_config.yaml" + cat "${DATASET_ROOT}/${SCENE_NAME}/camera_params.yml" >> "/tmp/config/data_config.yaml" + + # cp ${SEMANTIC_DATASET_CONFIG_PATH} "/tmp/config/semantic_data_config.yaml" + # cat "${DATASET_ROOT}/${SCENE_NAME}/camera_params.yml" >> "/tmp/config/data_config.yaml" + + ###### + # ConceptGraphs uses SAM in the "segment all" mode and extract class-agnostic masks. + ###### + python ${CG_FOLDER}/scripts/generate_gsa_results.py \ + --dataset_root ${DATASET_ROOT} \ + --dataset_config "/tmp/config/data_config.yaml" \ + --scene_id ${SCENE_NAME} \ + --class_set ${CLASS_SET} \ + --stride 5 + + + # # ###### + # # The following command builds an object-based 3D map of the scene, using the image segmentation results from above. + # ###### + # python ${CG_FOLDER}/slam/cfslam_pipeline_batch.py \ + # dataset_root=${DATASET_ROOT} \ + # dataset_config="/tmp/config/data_config.yaml" \ + # stride=5 \ + # scene_id=${SCENE_NAME} \ + # spatial_sim_type=overlap \ + # mask_conf_threshold=0.95 \ + # match_method=sim_sum \ + # sim_threshold=${THRESHOLD} \ + # dbscan_eps=0.1 \ + # gsa_variant=none \ + # class_agnostic=True \ + # skip_bg=True \ + # max_bbox_area_ratio=0.5 \ + # save_suffix=overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub \ + # merge_interval=20 \ + # merge_visual_sim_thresh=0.8 \ + # merge_text_sim_thresh=0.8 \ + # save_objects_all_frames=True + + + # ###### + # # If save_objects_all_frames=True was used to save the mapping results at every frame, which can be used for animated visualization by: + # ###### + # python ${CG_FOLDER}/scripts/animate_mapping_interactive.py \ + # --input_folder ${DATASET_ROOT}/${SCENE_NAME}/objects_all_frames/none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub + + + # ###### + # # If save_objects_all_frames=True was used to save the mapping results at every frame, which can be used for animated visualization by: + # ###### + # python ${CG_FOLDER}/scripts/animate_mapping_save.py \ + # --input_folder ${DATASET_ROOT}/${SCENE_NAME}/objects_all_frames/none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub + + + # ###### + # # Visualize the object-based mapping results. You can use keys b, c, r, f, i. + # ###### + # python ${CG_FOLDER}/scripts/visualize_cfslam_results.py \ + # --result_path ${DATASET_ROOT}/${SCENE_NAME}/pcd_saves/full_pcd_none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub.pkl.gz + + + # ###### + # # The following command runs a 3D RGB reconstruction (GradSLAM) of a scene and also visualize (--visualize) it. + # # This is useful for sanity check and evaluation. + # ###### + # python ${CG_FOLDER}/scripts/run_slam_rgb.py \ + # --dataset_root ${DATASET_ROOT} \ + # --dataset_config "/tmp/config/data_config.yaml" \ + # --scene_id ${SCENE_NAME} \ + # --image_height 680 \ + # --image_width 1200 \ + # --stride 5 + + + # # ##### + # # The following command runs a 3D RGB reconstruction (GradSLAM) of a scene with semantic labels and also visualize (--visualize) it. 
+ # # This is useful for evaluation. + # # ##### + # python ${CG_FOLDER}/scripts/run_slam_rgb.py \ + # --dataset_root ${SEMANTIC_DATASET_ROOT} \ + # --dataset_config ${SEMANTIC_DATASET_CONFIG_PATH} \ + # --scene_id ${SCENE_NAME} \ + # --image_height 480 \ + # --image_width 640 \ + # --stride 15 \ + # --load_semseg + +done # ###### -# # ConceptGraphs uses SAM in the "segment all" mode and extract class-agnostic masks +# # Then run the following commands to evaluate the semantic segmentation results. # ###### -# python ${CG_FOLDER}/scripts/generate_gsa_results.py \ -# --dataset_root ${DATASET_ROOT} \ -# --dataset_config ${DATASET_CONFIG_PATH} \ -# --scene_id ${SCENE_NAME} \ -# --class_set ${CLASS_SET} \ -# --stride 5 +# export SCENE_NAMES_STR=$(IFS=' '; echo "${SCENE_NAMES[*]}") - -# ###### -# # The following command builds an object-based 3D map of the scene, using the image segmentation results from above. -# ###### -# python ${CG_FOLDER}/slam/cfslam_pipeline_batch.py \ -# dataset_root=${DATASET_ROOT} \ -# dataset_config=${DATASET_CONFIG_PATH} \ -# stride=5 \ -# scene_id=${SCENE_NAME} \ -# spatial_sim_type=overlap \ -# mask_conf_threshold=0.95 \ -# match_method=sim_sum \ -# sim_threshold=${THRESHOLD} \ -# dbscan_eps=0.1 \ -# gsa_variant=none \ -# class_agnostic=True \ -# skip_bg=True \ -# max_bbox_area_ratio=0.5 \ -# save_suffix=overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub \ -# merge_interval=20 \ -# merge_visual_sim_thresh=0.8 \ -# merge_text_sim_thresh=0.8 \ -# save_objects_all_frames=True - - -# ###### -# # If save_objects_all_frames=True was used to save the mapping results at every frame, which can be used for animated visualization by: -# ###### -# python ${CG_FOLDER}/scripts/animate_mapping_interactive.py \ -# --input_folder ${DATASET_ROOT}/${SCENE_NAME}/objects_all_frames/none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub - - -# ###### -# # If save_objects_all_frames=True was used to save the mapping results at every frame, which can be used for animated visualization by: -# ###### -# python ${CG_FOLDER}/scripts/animate_mapping_save.py \ -# --input_folder ${DATASET_ROOT}/${SCENE_NAME}/objects_all_frames/none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub - - -# ###### -# # Visualize the object-based mapping results. You can use keys b, c, r, f, i. -# ###### -# python ${CG_FOLDER}/scripts/visualize_cfslam_results.py \ -# --result_path ${DATASET_ROOT}/${SCENE_NAME}/pcd_saves/full_pcd_none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub.pkl.gz - - -# ###### -# # The following command runs a 3D RGB reconstruction (GradSLAM) of a scene and also visualize it. -# # This is useful for sanity check and evaluation. -# ###### -# python ${CG_FOLDER}/scripts/run_slam_rgb.py \ -# --dataset_root ${DATASET_ROOT} \ -# --dataset_config ${DATASET_CONFIG_PATH} \ -# --scene_id ${SCENE_NAME} \ -# --image_height 680 \ -# --image_width 1200 \ -# --stride 5 \ -# --visualize - - -# # ##### -# # The following command runs a 3D RGB reconstruction (GradSLAM) of a scene with semantic labels and also visualize it. -# # This is useful for evaluation. -# # ##### -# python ${CG_FOLDER}/scripts/run_slam_rgb.py \ -# --dataset_root ${SEMANTIC_DATASET_ROOT} \ -# --dataset_config ${SEMANTIC_DATASET_CONFIG_PATH} \ -# --scene_id 'room_1' \ -# --image_height 480 \ -# --image_width 640 \ -# --stride 15 \ -# --visualize \ -# --load_semseg - - -# ###### -# # Then run the following command to evaluate the semantic segmentation results. 
-# ###### # python ${CG_FOLDER}/scripts/eval_replica_semseg.py \ # --replica_root ${DATASET_ROOT} \ # --replica_semantic_root ${SEMANTIC_DATASET_ROOT} \ # --n_exclude 6 \ +# --scene_ids_str "${SCENE_NAMES_STR}" \ # --pred_exp_name none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub # \ # # --device "cpu" \ No newline at end of file From 056c93909b7efa80b27688ed13565276edabee78 Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Wed, 11 Sep 2024 13:37:04 +0300 Subject: [PATCH 13/24] docker compose updating --- docker-compose.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/docker-compose.yaml b/docker-compose.yaml index b5fb103..2aec19f 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -21,6 +21,7 @@ services: - $ASSETS_DIR:/assets - $DATA_DIR:/data/ - $ROOT_DIR/export:/export/ + - $ROOT_DIR/adaptors:/adaptors/ runtime: nvidia privileged: true command: bash From f48f99015a7e886ccecad0f7ea619a8c7765b8a4 Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Wed, 11 Sep 2024 22:20:38 +0300 Subject: [PATCH 14/24] cg dockerfile update --- docker/concept_graphs.Dockerfile | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/docker/concept_graphs.Dockerfile b/docker/concept_graphs.Dockerfile index e52fea4..1353745 100644 --- a/docker/concept_graphs.Dockerfile +++ b/docker/concept_graphs.Dockerfile @@ -1,4 +1,4 @@ -FROM pytorch/pytorch:2.1.2-cuda11.8-cudnn8-devel AS base +FROM pytorch/pytorch:2.1.2-cuda12.1-cudnn8-devel AS base ENV DEBIAN_FRONTEND=noninteractive @@ -21,8 +21,8 @@ ENV LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/opt/hpcx/ucx/lib" RUN conda update -n base -c defaults conda RUN conda install -c pytorch faiss-cpu=1.7.4 mkl=2021 blas=1.0=mkl -RUN conda install pytorch==2.1.1 torchvision==0.16.1 torchaudio==2.1.1 pytorch-cuda=11.8 -c pytorch -c nvidia -y -RUN conda install https://anaconda.org/pytorch3d/pytorch3d/0.7.5/download/linux-64/pytorch3d-0.7.5-py310_cu118_pyt210.tar.bz2 +RUN conda install pytorch==2.1.1 torchvision==0.16.1 torchaudio==2.1.1 pytorch-cuda=12.1 -c pytorch -c nvidia -y +RUN conda install https://anaconda.org/pytorch3d/pytorch3d/0.7.5/download/linux-64/pytorch3d-0.7.5-py310_cu121_pyt210.tar.bz2 RUN pip install --upgrade pip @@ -31,20 +31,17 @@ RUN pip install tyro open_clip_torch wandb h5py openai hydra-core COPY ${ROOT_DIR}/Thirdparty /tmp/ # # Install the gradslam package and its dependencies # RUN git clone https://github.com/JaafarMahmoud1/chamferdist.git \ -RUN cd /tmp/chamferdist \ -&& pip install . -# && git clone https://github.com/gradslam/gradslam.git \ -RUN cd /tmp/gradslam \ -# && git checkout conceptfusion \ -&& pip install . +RUN cd /tmp/chamferdist && pip install . +RUN cd /tmp && git clone https://github.com/gradslam/gradslam.git -b conceptfusion +RUN cd /tmp/gradslam && pip install . 
--default-timeout=100 FROM base AS dev ARG USE_CUDA=0 -ARG TORCH_ARCH= +ARG TORCH_ARCH="3.5;5.0;6.0;6.1;7.0;7.5;8.0;8.6+PTX" ENV AM_I_DOCKER True ENV BUILD_WITH_CUDA "${USE_CUDA}" ENV TORCH_CUDA_ARCH_LIST "${TORCH_ARCH}" -ENV CUDA_HOME /usr/local/cuda-11.8/ +ENV CUDA_HOME /usr/local/cuda-12.1/ WORKDIR /tmp/Grounded-Segment-Anything @@ -52,8 +49,9 @@ RUN apt-get update -y && apt-get install --no-install-recommends wget ffmpeg=7:* libsm6=2:* libxext6=2:* -y \ && apt-get clean && apt-get autoremove && rm -rf /var/lib/apt/lists/* -RUN python -m pip install --no-cache-dir -e segment_anything && \ - python -m pip install --no-cache-dir -e GroundingDINO +RUN python -m pip install --no-cache-dir -e segment_anything +RUN python -m pip install --no-cache-dir wheel +RUN python -m pip install --no-cache-dir --no-build-isolation -e GroundingDINO RUN pip install --no-cache-dir diffusers[torch]==0.15.1 opencv-python==4.7.0.72 \ pycocotools==2.0.6 matplotlib==3.5.3 \ From ddab8f7ee62828f116abe57709d1f14df55cf1b8 Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Thu, 12 Sep 2024 02:07:43 +0300 Subject: [PATCH 15/24] cg bugfix: utils/viz color palette --- concept-graphs/conceptgraph/utils/vis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/concept-graphs/conceptgraph/utils/vis.py b/concept-graphs/conceptgraph/utils/vis.py index e65e846..f157515 100644 --- a/concept-graphs/conceptgraph/utils/vis.py +++ b/concept-graphs/conceptgraph/utils/vis.py @@ -194,7 +194,7 @@ def vis_result_fast( image: np.ndarray, detections: sv.Detections, classes: list[str], - color: Color | ColorPalette = ColorPalette.default(), + color: Color | ColorPalette = ColorPalette.DEFAULT, instance_random_color: bool = False, draw_bbox: bool = True, ) -> np.ndarray: From f51706f3548c47571d03afc2a45c73f05ade11ad Mon Sep 17 00:00:00 2001 From: kirillin Date: Fri, 13 Sep 2024 12:23:23 +0300 Subject: [PATCH 16/24] cg results, icra: baseline, camera_lights --- .../baseline_ex6_conf_matrices.pkl | Bin 0 -> 884325 bytes .../baseline_ex6_results.csv | 22 ++++++++++++++++++ .../camera_lights_ex6_conf_matrices.pkl | Bin 0 -> 884325 bytes .../camera_lights_ex6_results.csv | 22 ++++++++++++++++++ 4 files changed, 44 insertions(+) create mode 100755 concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_conf_matrices.pkl create mode 100755 concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_results.csv create mode 100755 concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_conf_matrices.pkl create mode 100755 concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_results.csv diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_conf_matrices.pkl b/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_conf_matrices.pkl new file mode 100755 index 0000000000000000000000000000000000000000..766f279579d2af30a766f712d4c9eb94086e880e GIT binary patch literal 884325 zcmeI*3t$^redzzu!!{Nh)=pz?LI{{gV8RWqW9P|2S0uY8*@=VW>?K1>qR5J@Q4ku9 zG%}l9-2W6}#}59p0cK(01{xXzW}*BSE*G}6-MSPm+bxCV@o#B){L^0U?dBHfQnqE& zhRgpr9&aqIWb0*FvcHDaPjlwXIluE6MVgmJmiNA&>~~2sf5Wys(K^%}NQF8BDKi*} z#v_5wPJ5>K*&#hX7q|In7WlyXPB*O#gXzXOb498P?GBDI- zzgH}@X0}>NgkU6WPqYMLdhldmFsZ*KFmT!or$nv2XD>^V6N{Q=EF1{OPey}r!#$?@M4@>) z5e}rHXTmANGi-QewLui7G(Bk=4T@X!h^c7YG#VAJ>YZsrk4KCq#hXrxK4rCOrt)mc 
[... base85-encoded binary patch data for baseline_ex6_conf_matrices.pkl elided ...]

literal 0
HcmV?d00001

diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_results.csv b/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_results.csv
new file mode 100755
index 0000000..a5cb705
--- /dev/null
+++ b/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_results.csv
@@ -0,0 +1,22 @@
+scene_id,miou,mrecall,mprecision,mf1score,fmiou
+v3_sc0_staging_00,23.345830217428254,39.66026792387614,26.810949812873634,26.668654135863694,3.7848566615983437
+v3_sc0_staging_12,18.643399293844897,47.77453442414601,18.756821046893794,21.174367667017684,3.884190654301152
+v3_sc0_staging_16,15.960986614324307,41.45536162890494,16.042454080694977,17.20586512660852,3.57829933991504
+v3_sc0_staging_19,16.79245331858207,40.01128889925894,17.258305777019512,19.252923414246588,4.112047147393327
+v3_sc0_staging_20,29.2813542928278,54.00531853416136,29.41739227529589,32.51218528216136,4.672988980337413
+v3_sc1_staging_00,15.495003791465933,36.75874466425739,20.984368484414517,19.261301765360315,2.972874031879691
+v3_sc1_staging_06,20.99228114200135,54.8289825518926,21.036967439576983,23.438325839018507,2.447827684626276
+v3_sc1_staging_12,1.5487374272197485,24.9190891161561,1.6417567036114633,2.0029250616105054,0.305274538833598
+v3_sc1_staging_19,21.528810459901305,46.752504040213196,23.25149741681183,27.199900430318312,5.811882378819158
+v3_sc1_staging_20,12.25880273782155,37.648388918708356,19.087876352098057,17.86369402359516,1.5988504296542287
+v3_sc2_staging_00,9.73483848075072,31.77378038565318,13.805749093492825,14.205857026013884,0.8533094042001017
+v3_sc2_staging_11,17.70449032774195,30.65307370852679,21.776979928836226,20.75427455121069,2.938277244059045
+v3_sc2_staging_13,23.75398351578042,50.20216293632984,32.352746825199574,30.99836618831111,8.293353555444577
+v3_sc2_staging_19,22.45139426086098,49.53165426850319,24.850738351233304,26.47702356111895,3.563814450163347
+v3_sc2_staging_20,16.516706463880837,41.09691232442856,18.88423142954707,21.858034215911793,2.6631079552615007
+v3_sc3_staging_03,15.136586013250053,36.235591769218445,16.012435988523066,17.89389768779659,1.9717539494885312
+v3_sc3_staging_04,20.490338886156678,31.815024930983782,28.60636063851416,24.62888852569598,3.3758584942557928
+v3_sc3_staging_08,12.976680997235235,38.23304183315486,16.76034535630606,18.36947506860192,4.369778713593835
+v3_sc3_staging_15,8.604571398566751,35.061855877147,11.645049207350787,12.681499551525194,2.107921310752318
+v3_sc3_staging_20,17.656101067276563,42.87629478117999,24.097983100835016,24.099548080973676,4.317741340024595
+all,14.772929585032054,43.498661270474685,28.03993814809741,20.85562032654649,2.7945876227714446

diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_conf_matrices.pkl b/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_conf_matrices.pkl
new file mode 100755
index 0000000000000000000000000000000000000000..07a376542499d0fb719adff7cd83d8326def1382
GIT binary patch
literal 884325

[... base85-encoded binary patch data for camera_lights_ex6_conf_matrices.pkl elided ...]

literal 0
HcmV?d00001

Date: Sat, 14 Sep 2024 01:40:55 +0300
Subject: [PATCH 17/24] semseg results: dynamic_lights, no_lights, velocity

---
 .../dynamic_lights_ex6_conf_matrices.pkl | Bin 0 -> 884321 bytes
 .../dynamic_lights_ex6_results.csv       |  22 ++++++++++++++++++
 .../no_lights_ex6_conf_matrices.pkl      | Bin 0 -> 884323 bytes
 .../no_lights_ex6_results.csv            |  22 ++++++++++++++++++
 .../velocity_ex6_conf_matrices.pkl       |
 .../velocity_ex6_conf_matrices.pkl            | Bin 0 -> 884325 bytes
 .../velocity_ex6_results.csv                  |  22 ++++++++++++++++++
 6 files changed, 66 insertions(+)
 create mode 100644 concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_conf_matrices.pkl
 create mode 100644 concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_results.csv
 create mode 100644 concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_conf_matrices.pkl
 create mode 100644 concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_results.csv
 create mode 100644 concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_conf_matrices.pkl
 create mode 100644 concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_results.csv

diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_conf_matrices.pkl b/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_conf_matrices.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..0e5a3b0f855c431df67741301cdc58a123f14f6f
GIT binary patch
literal 884321
[base85 payload: binary pickle of per-scene confusion matrices; truncated, not human-readable]
literal 0
HcmV?d00001

diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_results.csv b/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_results.csv
new file mode 100644
index 0000000..d324cad
--- /dev/null
+++ b/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_results.csv
@@ -0,0 +1,22 @@
+scene_id,miou,mrecall,mprecision,mf1score,fmiou
+v3_sc0_staging_00,16.35857254572329,36.49932509928476,16.780753862985875,18.20795674074539,2.229672935723111
+v3_sc0_staging_12,18.37031886169522,47.47496020048857,18.559143974998733,21.192853054458986,4.051327466652305
+v3_sc0_staging_16,17.258784222988954,36.30118364312997,22.626177414761816,19.86461525809445,3.9912149830049217
+v3_sc0_staging_19,18.3738461881876,40.301154454549156,18.732624525825184,21.370808233896984,4.080107392743362
+v3_sc0_staging_20,18.9288209458547,43.976374183382305,19.01918804006917,21.023355647071977,3.9251749737556336
+v3_sc1_staging_00,17.771121760597453,42.57268086075783,18.475668702740222,20.90293474031914,3.4996460792468445
+v3_sc1_staging_06,23.02044864743948,59.37089721361796,24.352017659693956,26.770977361193072,3.622491599907944
+v3_sc1_staging_12,1.4451698603807017,19.563234806992114,1.5386945033242228,1.7790211386387904,0.29830220298888394
+v3_sc1_staging_19,12.767627703792908,39.13529304897084,16.05573279016158,16.924165592567164,1.470670648524265
+v3_sc1_staging_20,10.89025823508992,38.82002690259148,15.744197894545161,13.85001744503256,1.5663664297654893
+v3_sc2_staging_00,9.524439349770546,31.556014021237694,13.566696544488272,13.784754655423498,0.7464087433831004
+v3_sc2_staging_11,9.487729566171765,29.386060871183872,15.238457545638084,11.86166112888154,2.062620633635652
+v3_sc2_staging_13,11.030462494602489,34.89576008169024,16.92058138661423,15.172217926378803,1.6572129084212266
+v3_sc2_staging_19,10.13098637922667,39.65153619647026,10.372836751048453,12.350877309562428,2.3484014938164774
+v3_sc2_staging_20,11.300820740871131,36.234867945313454,11.81814008159563,14.09514368965971,2.292482364690867
+v3_sc3_staging_03,6.203255371656269,30.881483480334282,6.230744835920632,8.866998548775932,0.9055596212233671
+v3_sc3_staging_04,17.37051352392882,36.97795569896698,17.918862169608474,19.847193693523213,3.3820542833073066
+v3_sc3_staging_08,20.024624434881844,44.35847927816212,25.213147336035036,25.31737779469919,5.48455973928394
+v3_sc3_staging_15,8.034933928181143,34.72810773288502,11.138984056956627,11.580419597608508,2.0589996077077286
+v3_sc3_staging_20,15.525695881532394,41.00211990230223,18.550551981281707,19.4979781317337,3.6922112412385286
+all,12.10307065056815,38.47415667859262,24.688727065222338,16.028993171824734,2.3056709092477488
diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_conf_matrices.pkl b/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_conf_matrices.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..3227e7ba1f7614ca8a42e3d02cff97a46f11aca0
GIT binary patch
literal 884323
[base85 payload: binary pickle of per-scene confusion matrices; truncated in this excerpt, along with the no_lights_ex6_results.csv and velocity_ex6_conf_matrices.pkl hunks that followed]

literal 0
HcmV?d00001

diff --git 
a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_results.csv b/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_results.csv new file mode 100644 index 0000000..b81c2b8 --- /dev/null +++ b/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_results.csv @@ -0,0 +1,22 @@ +scene_id,miou,mrecall,mprecision,mf1score,fmiou +v3_sc0_staging_00,23.42980394962069,40.060783571874104,26.87856757368233,26.877867254817428,3.6355798272400466 +v3_sc0_staging_12,18.797402292936262,41.22077479027212,18.913502278204152,21.56857685527445,4.4230981881908065 +v3_sc0_staging_16,16.77651462706611,34.334731792332605,16.898619970161235,18.555317235325898,3.488442381434584 +v3_sc0_staging_19,16.63017500794619,39.40351323260984,18.67025646997111,20.342833444142755,3.867382880966356 +v3_sc0_staging_20,26.80460010624951,53.96271105855703,31.105017784148004,29.733226487921144,4.381580594100662 +v3_sc1_staging_00,9.804344297299394,31.97172179352492,9.828932368509413,11.424110878045173,1.3428539651944187 +v3_sc1_staging_06,23.418373772874475,53.95212729771932,23.457049656038485,25.559301367368477,2.4085603724532825 +v3_sc1_staging_12,2.8787043236661702,24.26191633567214,7.274644443532452,4.093212474987287,0.22726690975098277 +v3_sc1_staging_19,15.768349893829402,40.21917423781227,15.964350805563084,20.033098396159176,3.65439178759707 +v3_sc1_staging_20,14.056822779307634,44.50530942515744,19.119636134436245,19.388919648728052,1.8919549131851576 +v3_sc2_staging_00,7.286798531810443,31.05852643648783,11.597795685132345,10.792986098808024,0.6118302871358112 +v3_sc2_staging_11,18.302014120854437,31.79463315755129,23.38705703150481,21.442979720757094,3.102126819944591 +v3_sc2_staging_13,18.766605685232207,44.24151942366734,30.2250535460189,26.182162854558772,7.383726823697718 +v3_sc2_staging_19,19.61685288697481,50.08802432566881,21.372730936855078,22.914562170925638,3.570078620567559 +v3_sc2_staging_20,17.72165964357555,41.26236494630575,19.179429789073765,21.17484688601106,3.0761168588758885 +v3_sc3_staging_03,15.49946607556194,35.35259123891592,16.02310462621972,17.06591267595689,3.4160577134631884 +v3_sc3_staging_04,19.916240757038395,31.17042934569554,24.686559654583107,23.06614154470773,3.914816951735934 +v3_sc3_staging_08,12.877018429571763,33.42335307970643,16.61006312642712,17.804586126973224,4.08436596323975 +v3_sc3_staging_15,8.516831192023613,35.10092058602501,11.588130759842254,12.490784585932845,2.254299952450923 +v3_sc3_staging_20,14.6969892522868,35.79522441415226,22.183601382900687,19.551265858289423,3.9344376783623973 +all,13.996063956138267,42.46658368364853,26.71652332235895,18.965123319739707,2.7006557919685634 From f2f5e56028e8e5d8e3dd1e51549d7b3aaf149c7a Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Mon, 23 Sep 2024 15:25:02 +0300 Subject: [PATCH 18/24] run_slam: small refactoring --- adaptors/run_slam.py | 35 +++++++++++++---------------------- 1 file changed, 13 insertions(+), 22 deletions(-) diff --git a/adaptors/run_slam.py b/adaptors/run_slam.py index 633738b..1901e33 100644 --- a/adaptors/run_slam.py +++ b/adaptors/run_slam.py @@ -1,25 +1,16 @@ import argparse -import json -import yaml -import os import glob +import os +import yaml +from dataclasses import dataclass from pathlib import Path -import numpy as np -import torch -import torch.nn.functional as F from tqdm import tqdm -from dataclasses import dataclass +from typing import Optional +import torch 
+import numpy as np import open3d as o3d -from conceptgraph.dataset.datasets_common import get_dataset - -from gradslam.slam.pointfusion import PointFusion -from gradslam.structures.pointclouds import Pointclouds -from gradslam.structures.rgbdimages import RGBDImages - -from typing import Dict, List, Optional, Union - @dataclass class Intrinsic: @@ -221,7 +212,11 @@ def create_semantic_point_cloud( return color_pcd, semantic_pcd -def main(args: argparse.Namespace): +def main(): + args = get_parser().parse_args() + torch.manual_seed(args.seed) + np.random.seed(args.seed) + dataset = MappingDataset( data_path = os.path.join(args.dataset_root, args.scene_id), stride = args.stride, @@ -291,9 +286,5 @@ def main(args: argparse.Namespace): ) -if __name__ == "__main__": - args = get_parser().parse_args() - torch.manual_seed(args.seed) - np.random.seed(args.seed) - - main(args) \ No newline at end of file +if __name__ == "__main__": + main() \ No newline at end of file From f6a8c9280de471e546290f43d82249e0a822d01a Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Mon, 23 Sep 2024 15:39:54 +0300 Subject: [PATCH 19/24] eval semseg: refactoring --- adaptors/__init__.py | 0 adaptors/adaptors/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 0 -> 123 bytes .../__pycache__/conceptgraph.cpython-310.pyc | Bin 0 -> 1956 bytes adaptors/adaptors/conceptgraph.py | 73 ++++ adaptors/eval_semseg.py | 348 ++---------------- .../utils/__pycache__/eval.cpython-310.pyc | Bin 0 -> 5566 bytes .../utils/__pycache__/metrics.cpython-310.pyc | Bin 0 -> 1976 bytes .../utils/__pycache__/utils.cpython-310.pyc | Bin 0 -> 1396 bytes .../utils/__pycache__/visual.cpython-310.pyc | Bin 0 -> 676 bytes adaptors/utils/eval.py | 219 +++++++++++ adaptors/utils/metrics.py | 66 ++++ adaptors/utils/utils.py | 50 +++ adaptors/utils/visual.py | 21 ++ 14 files changed, 468 insertions(+), 309 deletions(-) create mode 100644 adaptors/__init__.py create mode 100644 adaptors/adaptors/__init__.py create mode 100644 adaptors/adaptors/__pycache__/__init__.cpython-310.pyc create mode 100644 adaptors/adaptors/__pycache__/conceptgraph.cpython-310.pyc create mode 100644 adaptors/adaptors/conceptgraph.py create mode 100644 adaptors/utils/__pycache__/eval.cpython-310.pyc create mode 100644 adaptors/utils/__pycache__/metrics.cpython-310.pyc create mode 100644 adaptors/utils/__pycache__/utils.cpython-310.pyc create mode 100644 adaptors/utils/__pycache__/visual.cpython-310.pyc create mode 100644 adaptors/utils/eval.py create mode 100644 adaptors/utils/metrics.py create mode 100644 adaptors/utils/utils.py create mode 100644 adaptors/utils/visual.py diff --git a/adaptors/__init__.py b/adaptors/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/adaptors/adaptors/__init__.py b/adaptors/adaptors/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/adaptors/adaptors/__pycache__/__init__.cpython-310.pyc b/adaptors/adaptors/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..db16295578d617042cc6be26911f22f7be9c5a47 GIT binary patch literal 123 zcmd1j<>g`k0x`u8X(0MBh(HF6K#l_t7qb9~6oz01O-8?!3`HPe1o2BwKQSe-pd`Pj j7{Q2-&&#j^`Kg literal 0 HcmV?d00001 diff --git a/adaptors/adaptors/__pycache__/conceptgraph.cpython-310.pyc b/adaptors/adaptors/__pycache__/conceptgraph.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..468641584a1dc2e23765de824fda76a378648351 GIT binary patch literal 1956 
[base85 payload: compiled conceptgraph.cpython-310.pyc bytecode; not human-readable]

literal 0
HcmV?d00001

diff --git a/adaptors/adaptors/conceptgraph.py b/adaptors/adaptors/conceptgraph.py
new file mode 100644
index 0000000..2203ad8
--- /dev/null
+++ b/adaptors/adaptors/conceptgraph.py
@@ -0,0 +1,73 @@
+import glob
+import gzip
+import os
+import pickle
+
+import torch
+import numpy as np
+
+from conceptgraph.slam.slam_classes import MapObjectList
+
+
+def load_pred_pointcloud(args, scene_id, class_feats, ignore_index):
+    '''Load the predicted point cloud'''
+    result_paths = glob.glob(
+        os.path.join(
+            args.replica_root, scene_id, "pcd_saves",
+            f"full_pcd_{args.pred_exp_name}*.pkl.gz"
+        )
+    )
+
+    if len(result_paths) == 0:
+        raise ValueError(f"No result found for {scene_id} with {args.pred_exp_name}")
+
+    # Get the newest result over result_paths
+    result_paths = sorted(result_paths, key=os.path.getmtime)
+    result_path = result_paths[-1]
+    print(f"Loading mapping result from {result_path}")
+
+    with gzip.open(result_path, "rb") as f:
+        results = pickle.load(f)
+
+    objects = MapObjectList()
+    objects.load_serializable(results['objects'])
+
+    # Compute the CLIP similarity for the mapped objects and assign class to them
+    object_feats = objects.get_stacked_values_torch("clip_ft").to(args.device)
+    object_feats = object_feats / object_feats.norm(dim=-1, keepdim=True) # (num_objects, D)
+    object_class_sim = object_feats @ class_feats['feats'].T # (num_objects, num_classes)
+
+    # suppress the logits to -inf that are not in torch.from_numpy(keep_class_index)
+    object_class_sim[:, ignore_index] = torch.Tensor([-float("Inf")]) # -1e10
+    object_class = object_class_sim.argmax(dim=-1) # (num_objects,)
+
+    if args.n_exclude == 1:
+        if results['bg_objects'] is None:
+            print("Warning: no background objects found. This is expected if only SAM is used, but not the detector. 
") + else: + # Also add the background objects + bg_objects = MapObjectList() + bg_objects.load_serializable(results['bg_objects']) + + # Assign class to the background objects (hard assignment) + for obj in bg_objects: + cn = obj['class_name'][0] + c = class_feats['names'].index(cn.lower()) + object_class = torch.cat([object_class, object_class.new_full([1], c)]) + + objects += bg_objects + + pred_xyz = [] + pred_color = [] + pred_class = [] + for i in range(len(objects)): + obj_pcd = objects[i]['pcd'] + pred_xyz.append(np.asarray(obj_pcd.points)) + pred_color.append(np.asarray(obj_pcd.colors)) + pred_class.append(np.ones(len(obj_pcd.points)) * object_class[i].item()) + + pred_xyz = torch.from_numpy(np.concatenate(pred_xyz, axis=0)) + pred_color = torch.from_numpy(np.concatenate(pred_color, axis=0)) + pred_class = torch.from_numpy(np.concatenate(pred_class, axis=0)).long() + + return pred_xyz, pred_color, pred_class \ No newline at end of file diff --git a/adaptors/eval_semseg.py b/adaptors/eval_semseg.py index 05d5b41..4a32a85 100644 --- a/adaptors/eval_semseg.py +++ b/adaptors/eval_semseg.py @@ -1,33 +1,18 @@ -import gzip import os -import glob from pathlib import Path import argparse import pickle import json -from tqdm import tqdm import numpy as np import open3d as o3d -import pandas as pd import torch import open_clip -# from chamferdist.chamfer import knn_points -from pytorch3d.ops.knn import knn_points -from gradslam.structures.pointclouds import Pointclouds - -# from conceptgraph.dataset.replica_constants import ( -# # REPLICA_EXISTING_CLASSES, -# # REPLICA_CLASSES, -# # REPLICA_SCENE_IDS, -# # REPLICA_SCENE_IDS_, -# ) -from conceptgraph.slam.slam_classes import MapObjectList -from conceptgraph.utils.vis import get_random_colors -from conceptgraph.utils.eval import compute_confmatrix, compute_pred_gt_associations, compute_metrics +from utils.eval import eval_loop +from utils.metrics import metrics_loop def get_parser(): @@ -71,233 +56,7 @@ def get_parser(): return parser -def eval_replica( - scene_id: str, - # scene_id_: str, - class_names: list[str], - class_feats: torch.Tensor, - args: argparse.Namespace, - # class_all2existing: torch.Tensor, - ignore_index=[], - gt_class_only: bool = True, # only compute the conf matrix for the GT classes -): - class2color = get_random_colors(len(class_names)) - - '''Load the GT point cloud''' - gt_pc_path = os.path.join( - args.replica_semantic_root, scene_id, "rgb_cloud" - # args.replica_semantic_root, scene_id_, "Sequence_1", "saved-maps-gt" - ) - gt_pose_path = os.path.join( - # args.replica_semantic_root, scene_id, "Sequence_1", "traj_w_c.txt" - args.replica_semantic_root, scene_id, "traj.txt" - ) - - gt_map = o3d.io.read_point_cloud(os.path.join(gt_pc_path, "semantic.pcd")) - gt_poses = np.loadtxt(gt_pose_path) - gt_poses = torch.from_numpy(gt_poses.reshape(-1, 4, 4)).float() - - gt_xyz = torch.tensor(np.asarray(gt_map.points)) - # gt_color = gt_map.colors_padded[0] - gt_class_np = (np.asarray(gt_map.colors)[..., 0] * 255).round() - gt_class = torch.tensor(gt_class_np, dtype=torch.int) # (N,) - # gt_class = class_all2existing[gt_class] # (N,) - assert gt_class.min() >= 0 - # assert gt_class.max() < len(REPLICA_EXISTING_CLASSES) - assert gt_class.max() < len(class_names) - - print(gt_class, gt_class.shape, gt_class.dtype, gt_class) - - # # transform pred_xyz and gt_xyz according to the first pose in gt_poses - # gt_xyz = gt_xyz @ gt_poses[0, :3, :3].t() + gt_poses[0, :3, 3] - - # Get the set of classes that are used for evaluation - 
all_class_index = np.arange(len(class_names)) - ignore_index = np.asarray(ignore_index) - if gt_class_only: - # Only consider the classes that exist in the current scene - existing_index = gt_class.unique().cpu().numpy() - non_existing_index = np.setdiff1d(all_class_index, existing_index) - ignore_index = np.append(ignore_index, non_existing_index) - print( - "Using only the classes that exists in GT of this scene: ", - len(existing_index), - ) - - keep_index = np.setdiff1d(all_class_index, ignore_index) - - print( - f"{len(keep_index)} classes remains. They are: ", - [(i, class_names[i]) for i in keep_index], - ) - - '''Load the predicted point cloud''' - result_paths = glob.glob( - os.path.join( - args.replica_root, scene_id, "pcd_saves", - f"full_pcd_{args.pred_exp_name}*.pkl.gz" - ) - ) - if len(result_paths) == 0: - raise ValueError(f"No result found for {scene_id} with {args.pred_exp_name}") - - # Get the newest result over result_paths - result_paths = sorted(result_paths, key=os.path.getmtime) - result_path = result_paths[-1] - print(f"Loading mapping result from {result_path}") - - with gzip.open(result_path, "rb") as f: - results = pickle.load(f) - - objects = MapObjectList() - objects.load_serializable(results['objects']) - - # Compute the CLIP similarity for the mapped objects and assign class to them - object_feats = objects.get_stacked_values_torch("clip_ft").to(args.device) - object_feats = object_feats / object_feats.norm(dim=-1, keepdim=True) # (num_objects, D) - object_class_sim = object_feats @ class_feats.T # (num_objects, num_classes) - - # suppress the logits to -inf that are not in torch.from_numpy(keep_class_index) - object_class_sim[:, ignore_index] = -1e10 - object_class = object_class_sim.argmax(dim=-1) # (num_objects,) - - if args.n_exclude == 1: - if results['bg_objects'] is None: - print("Warning: no background objects found. This is expected if only SAM is used, but not the detector. 
") - else: - # Also add the background objects - bg_objects = MapObjectList() - bg_objects.load_serializable(results['bg_objects']) - - # Assign class to the background objects (hard assignment) - for obj in bg_objects: - cn = obj['class_name'][0] - c = class_names.index(cn.lower()) - object_class = torch.cat([object_class, object_class.new_full([1], c)]) - - objects += bg_objects - - pred_xyz = [] - pred_color = [] - pred_class = [] - for i in range(len(objects)): - obj_pcd = objects[i]['pcd'] - pred_xyz.append(np.asarray(obj_pcd.points)) - pred_color.append(np.asarray(obj_pcd.colors)) - pred_class.append(np.ones(len(obj_pcd.points)) * object_class[i].item()) - - pred_xyz = torch.from_numpy(np.concatenate(pred_xyz, axis=0)) - pred_color = torch.from_numpy(np.concatenate(pred_color, axis=0)) - pred_class = torch.from_numpy(np.concatenate(pred_class, axis=0)).long() - - '''Load the SLAM reconstruction results, to ensure fair comparison''' - slam_path = os.path.join( - args.replica_root, scene_id, "rgb_cloud" - ) - # slam_pointclouds = Pointclouds.load_pointcloud_from_h5(slam_path) - # slam_xyz = slam_pointclouds.points_padded[0] - - slam_pointclouds = o3d.io.read_point_cloud(os.path.join(slam_path, "pointcloud.pcd")) - slam_xyz = torch.tensor(np.asarray(slam_pointclouds.points)) - - # To ensure fair comparison, build the prediction point cloud based on the slam results - # Search for NN of slam_xyz in pred_xyz - slam_nn_in_pred = knn_points( - slam_xyz.unsqueeze(0).cuda().contiguous().float(), - pred_xyz.unsqueeze(0).cuda().contiguous().float(), - lengths1=None, - lengths2=None, - return_nn=True, - return_sorted=True, - K=1, - ) - idx_slam_to_pred = slam_nn_in_pred.idx.squeeze(0).squeeze(-1) - - # # predicted point cloud in open3d - # print("Before resampling") - # pred_pcd = o3d.geometry.PointCloud() - # pred_pcd.points = o3d.utility.Vector3dVector(pred_xyz.numpy()) - # pred_pcd.colors = o3d.utility.Vector3dVector(class2color[pred_class.numpy()]) - # o3d.visualization.draw_geometries([pred_pcd]) - - # Resample the pred_xyz and pred_class based on slam_nn_in_pred - pred_xyz = slam_xyz - # pred_xyz = pred_xyz @ gt_poses[0, :3, :3].t() + gt_poses[0, :3, 3] - pred_class = pred_class[idx_slam_to_pred.cpu()] - pred_color = pred_color[idx_slam_to_pred.cpu()] - - # # predicted point cloud in open3d - # print("After resampling") - # pred_pcd = o3d.geometry.PointCloud() - # pred_pcd.points = o3d.utility.Vector3dVector(pred_xyz.numpy()) - # pred_pcd.colors = o3d.utility.Vector3dVector(class2color[pred_class.numpy()]) - # o3d.visualization.draw_geometries([pred_pcd]) - - # # GT point cloud in open3d - # print("GT pointcloud") - # gt_pcd = o3d.geometry.PointCloud() - # gt_pcd.points = o3d.utility.Vector3dVector(gt_xyz.numpy()) - # gt_pcd.colors = o3d.utility.Vector3dVector(class2color[gt_class.numpy()]) - # o3d.visualization.draw_geometries([gt_pcd]) - - # print("Merged pointcloud") - # o3d.visualization.draw_geometries([gt_pcd, pred_pcd]) - - # Compute the associations between the predicted and ground truth point clouds - idx_pred_to_gt, idx_gt_to_pred = compute_pred_gt_associations( - pred_xyz.unsqueeze(0).cuda().contiguous().float(), - gt_xyz.unsqueeze(0).cuda().contiguous().float(), - ) - - # Only keep the points on the 3D reconstructions that are mapped to - # GT point that is in keep_index - label_gt = gt_class[idx_pred_to_gt.cpu()] - pred_keep_idx = torch.isin(label_gt, torch.from_numpy(keep_index)) - pred_class = pred_class[pred_keep_idx] - idx_pred_to_gt = idx_pred_to_gt[pred_keep_idx] - 
idx_gt_to_pred = None # not to be used - - # Compute the confusion matrix - confmatrix = compute_confmatrix( - pred_class.cuda(), - gt_class.cuda(), - idx_pred_to_gt, - idx_gt_to_pred, - class_names, - ) - - assert confmatrix.sum(0)[ignore_index].sum() == 0 - assert confmatrix.sum(1)[ignore_index].sum() == 0 - - # '''Visualization for debugging''' - # print('GT point cloud in open3d') - # class2color = get_random_colors(len(class_names)) - - # # GT point cloud in open3d - # gt_pcd = gt_map.open3d(0) - # gt_pcd.transform(gt_poses[0].numpy()) - # gt_pcd.colors = o3d.utility.Vector3dVector(class2color[gt_class]) - - # # predicted point cloud in open3d - # pred_pcd = o3d.geometry.PointCloud() - # pred_pcd.points = o3d.utility.Vector3dVector(pred_xyz.numpy()) - # pred_pcd.colors = o3d.utility.Vector3dVector(class2color[pred_class.numpy()]) - - # o3d.visualization.draw_geometries([pred_pcd]) - # o3d.visualization.draw_geometries([gt_pcd]) - - return confmatrix, keep_index - - -def main(args: argparse.Namespace): - - # map REPLICA_CLASSES to REPLICA_EXISTING_CLASSES - # class_all2existing = torch.ones(len(REPLICA_CLASSES)).long() * -1 - # for i, c in enumerate(REPLICA_EXISTING_CLASSES): - # class_all2existing[c] = i - # class_names = [REPLICA_CLASSES[i] for i in REPLICA_EXISTING_CLASSES] - # class_names = REPLICA_CLASSES - +def get_semseg_class_names(args): if args.semseg_classes_path is not None: semseg_classes = json.load(open( args.semseg_classes_path, "r" @@ -309,95 +68,52 @@ def main(args: argparse.Namespace): class_ids = [class_param['id'] for class_param in semseg_classes] assert set(class_ids) == set([i for i in range(len(semseg_classes))]), np.unique(class_ids) - id_name_dict = {class_param['id']: class_param['name'] for class_param in semseg_classes} - class_names = [id_name_dict[i] for i in range(len(semseg_classes))] + id_to_class_dict = {class_param['id']: class_param['name'] for class_param in semseg_classes} + class_to_id_dict = {class_param['name']: class_param['id'] for class_param in semseg_classes} if args.n_exclude == 1: - exclude_class = [class_names.index(c) for c in [ + exclude_class_ids = [class_to_id_dict[key] for key in [ "other" ]] elif args.n_exclude == 4: - exclude_class = [class_names.index(c) for c in [ + exclude_class_ids = [class_to_id_dict[key] for key in [ "other", "floor", "wall", "ceiling" ]] elif args.n_exclude == 6: - exclude_class = [class_names.index(c) for c in [ + exclude_class_ids = [class_to_id_dict[key] for key in [ "other", "floor", "wall", "ceiling", "door", "window" ]] else: raise ValueError("Invalid n_exclude: %d" % args.n_exclude) - print("Excluding classes: ", [(i, class_names[i]) for i in exclude_class]) + print("Excluding classes: ", [(i, id_to_class_dict[i]) for i in exclude_class_ids]) + + return id_to_class_dict, class_to_id_dict, exclude_class_ids - # Compute the CLIP embedding for each class - clip_model, _, clip_preprocess = open_clip.create_model_and_transforms("ViT-H-14", "laion2b_s32b_b79k") + +def compute_clip_embeddings(args, class_names): + ''' + Compute the CLIP embedding for each class + ''' + clip_model, _, _ = open_clip.create_model_and_transforms("ViT-H-14", "laion2b_s32b_b79k") clip_model = clip_model.to(args.device) clip_tokenizer = open_clip.get_tokenizer("ViT-H-14") - prompts = [f"an image of {c}" for c in class_names] + prompts = [f"an image of {name}" for name in class_names] text = clip_tokenizer(prompts) text = text.to(args.device) batch_size = 64 class_feats = [] - for i in range(int(np.ceil(len(text)/ batch_size))): 
+    for i in range(int(np.ceil(len(text) / batch_size))):
         with torch.no_grad():
-            class_feats.append(clip_model.encode_text(text[i*batch_size:(i+1)*batch_size]))
+            class_feats.append(clip_model.encode_text(text[i * batch_size : (i+1) * batch_size]))
 
     class_feats = torch.cat(class_feats, dim=0)
     class_feats /= class_feats.norm(dim=-1, keepdim=True) # (num_classes, D)
 
-    # scene_ids = REPLICA_SCENE_IDS
-    scene_ids = list(args.scene_ids_str.split())
-
-    conf_matrices = {}
-    conf_matrix_all = 0
-    for scene_id in scene_ids:
-        print("Evaluating on:", scene_id)
-        conf_matrix, keep_index = eval_replica(
-            scene_id = scene_id,
-            # scene_id_ = scene_id_,
-            class_names = class_names,
-            class_feats = class_feats,
-            args = args,
-            # class_all2existing = class_all2existing,
-            ignore_index = exclude_class,
-        )
-
-        conf_matrix = conf_matrix.detach().cpu()
-        conf_matrix_all += conf_matrix
+    return {'feats': class_feats, 'names': class_names}
 
-        conf_matrices[scene_id] = {
-            "conf_matrix": conf_matrix,
-            "keep_index": keep_index,
-        }
-
-    # Remove the rows and columns that are not in keep_class_index
-    conf_matrices["all"] = {
-        "conf_matrix": conf_matrix_all,
-        "keep_index": conf_matrix_all.sum(axis=1).nonzero().reshape(-1), # Looks like a mistake
-        # "keep_index": conf_matrix_all.sum(axis=0).nonzero().reshape(-1)
-    }
-
-    results = []
-    for scene_id, res in conf_matrices.items():
-        conf_matrix = res["conf_matrix"]
-        keep_index = res["keep_index"]
-        conf_matrix = conf_matrix[keep_index, :][:, keep_index]
-        keep_class_names = [class_names[i] for i in keep_index]
-        mdict = compute_metrics(conf_matrix, keep_class_names)
-        results.append(
-            {
-                "scene_id": scene_id,
-                "miou": mdict["miou"] * 100.0,
-                "mrecall": np.mean(mdict["recall"]) * 100.0,
-                "mprecision": np.mean(mdict["precision"]) * 100.0,
-                "mf1score": np.mean(mdict["f1score"]) * 100.0,
-                "fmiou": mdict["fmiou"] * 100.0,
-            }
-        )
-
-    df_result = pd.DataFrame(results)
-
+def save_results(args, conf_matrices, df_result):
     save_path = "./results/%s/%s_ex%d_results.csv" % (
         args.pred_exp_name, args.label, args.n_exclude
     )
@@ -408,10 +124,24 @@ def main(args: argparse.Namespace):
     save_path = "./results/%s/%s_ex%d_conf_matrices.pkl" % (
         args.pred_exp_name, args.label, args.n_exclude
     )
+    pickle.dump(conf_matrices, open(save_path, "wb"))
+
 
-
-if __name__ == '__main__':
+def main():
     parser = get_parser()
     args = parser.parse_args()
-    main(args)
\ No newline at end of file
+
+    id_to_class_dict, class_to_id_dict, exclude_class_ids = get_semseg_class_names(args)
+
+    class_feats = compute_clip_embeddings(args, list(class_to_id_dict.keys()))
+
+    conf_matrices = eval_loop(args, class_feats, exclude_class_ids, id_to_class_dict, class_to_id_dict)
+
+    df_result = metrics_loop(conf_matrices, class_feats['names'])
+
+    save_results(args, conf_matrices, df_result)
+
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
diff --git a/adaptors/utils/__pycache__/eval.cpython-310.pyc b/adaptors/utils/__pycache__/eval.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..97fddc72c4ffdb35ea1dce8bae17a24c93b77dda
Binary files /dev/null and b/adaptors/utils/__pycache__/eval.cpython-310.pyc differ
diff --git a/adaptors/utils/__pycache__/metrics.cpython-310.pyc b/adaptors/utils/__pycache__/metrics.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/adaptors/utils/__pycache__/metrics.cpython-310.pyc differ
diff --git a/adaptors/utils/__pycache__/utils.cpython-310.pyc b/adaptors/utils/__pycache__/utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a826fb0b37005f7aa409dc072a74deca3136d9a9
Binary files /dev/null and b/adaptors/utils/__pycache__/utils.cpython-310.pyc differ
diff --git a/adaptors/utils/__pycache__/visual.cpython-310.pyc b/adaptors/utils/__pycache__/visual.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b850353f3b213df9c8b169f401d27dead3d36703
Binary files /dev/null and b/adaptors/utils/__pycache__/visual.cpython-310.pyc differ
diff --git a/adaptors/utils/eval.py b/adaptors/utils/eval.py
new file mode 100644
index 0000000..479dc7f
--- /dev/null
+++ b/adaptors/utils/eval.py
@@ -0,0 +1,219 @@
+import argparse
+import os
+
+import torch
+import numpy as np
+import open3d as o3d
+
+from pytorch3d.ops.knn import knn_points
+from sklearn.metrics import confusion_matrix
+
+from adaptors.conceptgraph import load_pred_pointcloud
+from utils.utils import debug_visualize_loaded_pointclouds
+
+
+def load_gt_pointcloud(args, scene_id, id_to_class_dict):
+    gt_pc_path = os.path.join(
+        args.replica_semantic_root, scene_id, "rgb_cloud"
+    )
+    gt_pose_path = os.path.join(
+        args.replica_semantic_root, scene_id, "traj.txt"
+    )
+
+    gt_map = o3d.io.read_point_cloud(os.path.join(gt_pc_path, "semantic.pcd"))
+    gt_poses = np.loadtxt(gt_pose_path)
+    gt_poses = torch.from_numpy(gt_poses.reshape(-1, 4, 4)).float()
+
+    gt_xyz = torch.tensor(np.asarray(gt_map.points))
+    gt_class_np = (np.asarray(gt_map.colors)[..., 0] * 255).round()
+    gt_class = torch.tensor(gt_class_np, dtype=torch.int) # (N,)
+
+    assert set(id_to_class_dict.keys()) >= set(gt_class.unique().numpy())
+
+    return gt_map, gt_poses, gt_xyz, gt_class
+
+
+def get_keep_indices(exclude_class_idx, id_to_class_dict, gt_class, gt_class_only):
+    '''
+    Get the set of classes that are used for evaluation
+
+    Args:
+        gt_class_only - Only consider the classes that exist in the current scene
+    '''
+    all_class_index = list(id_to_class_dict.keys())
+    ignore_index = np.asarray(exclude_class_idx)
+
+    if gt_class_only:
+        existing_index = gt_class.unique().cpu().numpy()
+        non_existing_index = np.setdiff1d(all_class_index, existing_index)
+        ignore_index = np.append(ignore_index, non_existing_index)
+        print(
+            "Using only the classes that exist in the GT of this scene: ",
+            len(existing_index),
+        )
+
+    ignore_index = set(ignore_index)
+    keep_index = set(all_class_index) - ignore_index
+
+    print(
+        f"{len(keep_index)} classes remain. They are: ",
+        [(i, id_to_class_dict[i]) for i in keep_index],
+    )
+
+    assert sorted(list(keep_index) + list(ignore_index)) == sorted(list(id_to_class_dict.keys()))
+
+    return keep_index, ignore_index
+
+
+def remap_gt_index(gt_class, keep_index, ignore_index, class_to_id_dict, class_feats):
+    remapping = {class_to_id_dict[class_name]: i for i, class_name in enumerate(class_feats['names'])}
+
+    keep_index_remapped = sorted([remapping[idx] for idx in keep_index])
+    ignore_index_remapped = sorted([remapping[idx] for idx in ignore_index])
+
+    remapping_tensor = torch.tensor(
+        [remapping.get(idx, -1) for idx in range(max(list(class_to_id_dict.values())) + 1)]
+    )
+
+    gt_class_remapped = remapping_tensor[gt_class]
+
+    for new_idx, name in enumerate(class_feats['names']):
+        old_idx = class_to_id_dict[name]
+
+        assert remapping_tensor[old_idx] == new_idx
+        assert remapping[old_idx] == new_idx
+
+    return gt_class_remapped, keep_index_remapped, ignore_index_remapped
+
+
+def compute_knn_associations(src_xyz, dst_xyz):
+    knn_pred = knn_points(
+        src_xyz.unsqueeze(0).cuda().contiguous().float(),
+        dst_xyz.unsqueeze(0).cuda().contiguous().float(),
+        lengths1=None,
+        lengths2=None,
+        return_nn=True,
+        return_sorted=True,
+        K=1,
+    )
+
+    idx_src_to_dst = knn_pred.idx.squeeze(0).squeeze(-1)
+
+    return idx_src_to_dst
+
+
+def load_slam_reconstructed_gt(args, scene_id):
+    '''Load the SLAM reconstruction results, to ensure fair comparison'''
+    slam_path = os.path.join(args.replica_root, scene_id, "rgb_cloud")
+
+    slam_pointclouds = o3d.io.read_point_cloud(os.path.join(slam_path, "pointcloud.pcd"))
+    slam_xyz = torch.tensor(np.asarray(slam_pointclouds.points))
+
+    return slam_xyz
+
+
+def evaluate_scene(
+    scene_id: str,
+    id_to_class_dict: dict[int, str],
+    class_to_id_dict: dict[str, int],
+    class_feats: dict, # {'feats': torch.Tensor, 'names': list[str]}
+    args: argparse.Namespace,
+    exclude_class_idx = [],
+    gt_class_only: bool = True, # only compute the conf matrix for the GT classes
+):
+    gt_map, gt_poses, gt_xyz, gt_class = load_gt_pointcloud(args, scene_id, id_to_class_dict)
+
+    keep_index, ignore_index = get_keep_indices(exclude_class_idx, id_to_class_dict, gt_class, gt_class_only)
+
+    gt_class, keep_index, ignore_index = \
+        remap_gt_index(gt_class, keep_index, ignore_index, class_to_id_dict, class_feats)
+
+    pred_xyz, pred_color, pred_class = load_pred_pointcloud(args, scene_id, class_feats, ignore_index)
+
+    slam_xyz = load_slam_reconstructed_gt(args, scene_id)
+
+    # To ensure fair comparison, build the prediction point cloud based on the slam results
+    # Search for NN of slam_xyz in pred_xyz
+    idx_slam_to_pred = compute_knn_associations(slam_xyz, pred_xyz).cpu()
+
+    # Resample the pred_xyz and pred_class based on slam_nn_in_pred
+    pred_xyz = slam_xyz
+    pred_class = pred_class[idx_slam_to_pred]
+    pred_color = pred_color[idx_slam_to_pred]
+
+    # debug_visualize_loaded_pointclouds(pred_class, class_feats['names'], pred_xyz, gt_xyz, gt_class, keep_index)
+
+    # Compute the associations between the predicted and ground truth point clouds
+    idx_pred_to_gt = compute_knn_associations(pred_xyz, gt_xyz).cpu()
+
+    # Only keep the points on the 3D reconstructions that are mapped to
+    # GT point that is in keep_index
+    label_gt = gt_class[idx_pred_to_gt]
+    pred_keep_idx = torch.isin(label_gt, torch.tensor(keep_index))
+    pred_class = pred_class[pred_keep_idx]
+    idx_pred_to_gt = idx_pred_to_gt[pred_keep_idx]
+
+    # Compute the confusion matrix
+    confmatrix = confusion_matrix(
+        y_true = gt_class[idx_pred_to_gt].cpu().numpy(),
+        y_pred = pred_class.cpu().numpy(),
+        labels = np.arange(len(class_feats['names']))
+    )
+
+    confmatrix = torch.tensor(confmatrix)
+
+    assert confmatrix.sum(0)[ignore_index].sum() == 0
+    assert confmatrix.sum(1)[ignore_index].sum() == 0
+
+    # '''Visualization for debugging'''
+    # print('GT point cloud in open3d')
+    # class2color = get_semseg_palette(len(class_feats['names']))
+
+    # # GT point cloud in open3d
+    # gt_pcd = gt_map.open3d(0)
+    # gt_pcd.transform(gt_poses[0].numpy())
+    # gt_pcd.colors = o3d.utility.Vector3dVector(class2color[gt_class])
+
+    # # predicted point cloud in open3d
+    # pred_pcd = o3d.geometry.PointCloud()
+    # pred_pcd.points = o3d.utility.Vector3dVector(pred_xyz.numpy())
+    # pred_pcd.colors = o3d.utility.Vector3dVector(class2color[pred_class.numpy()])
+
+    # o3d.visualization.draw_geometries([pred_pcd])
+    # o3d.visualization.draw_geometries([gt_pcd])
+
+    return confmatrix, keep_index
+
+
+def eval_loop(args, class_feats, exclude_class, id_to_class_dict, class_to_id_dict):
+    conf_matrices = {}
+    scene_ids = list(args.scene_ids_str.split())
+
+    for scene_id in scene_ids:
+        print("Evaluating on:", scene_id)
+        conf_matrix, keep_index = evaluate_scene(
+            scene_id = scene_id,
+            id_to_class_dict = id_to_class_dict,
+            class_to_id_dict = class_to_id_dict,
+            class_feats = class_feats,
+            args = args,
+            exclude_class_idx = exclude_class,
+        )
+
+        conf_matrix = conf_matrix.detach().cpu()
+
+        conf_matrices[scene_id] = {
+            "conf_matrix": conf_matrix,
+            "keep_index": keep_index,
+        }
+
+    conf_matrix_all = np.sum([conf_matrix["conf_matrix"].numpy() for conf_matrix in conf_matrices.values()], axis=0)
+    keep_index_all = np.unique([conf_matrix["keep_index"] for conf_matrix in conf_matrices.values()])
+
+    conf_matrices["all"] = {
+        "conf_matrix": torch.tensor(conf_matrix_all),
+        "keep_index": torch.tensor(keep_index_all),
+    }
+
+    return conf_matrices
+
diff --git a/adaptors/utils/metrics.py b/adaptors/utils/metrics.py
new file mode 100644
index 0000000..ac7bcbb
--- /dev/null
+++ b/adaptors/utils/metrics.py
@@ -0,0 +1,66 @@
+import torch
+import numpy as np
+import pandas as pd
+
+
+def compute_metrics(confmatrix, class_names):
+    '''
+    iou - Jaccard index (per-class intersection over union)
+    '''
+    if isinstance(confmatrix, torch.Tensor):
+        confmatrix = confmatrix.cpu().numpy()
+
+    tp = np.diag(confmatrix)
+    fp = confmatrix.sum(axis=0) - tp
+    fn = confmatrix.sum(axis=1) - tp
+
+    ious = tp / np.maximum(fn + fp + tp, 1e-7)
+    miou = ious.mean()
+    f_miou = (ious * (tp + fn) / confmatrix.sum()).sum()
+
+    precision = tp / np.maximum(tp + fp, 1e-7)
+    recall = tp / np.maximum(tp + fn, 1e-7)
+
+    f1score = 2 * precision * recall / np.maximum(precision + recall, 1e-7)
+
+    mdict = {
+        "class_names": class_names,
+        "num_classes": len(class_names),
+        "iou": ious.tolist(),
+        "miou": miou.item(),
+        "fmiou": f_miou.item(),
+        "acc0.15": (ious > 0.15).sum().item(),
+        "acc0.25": (ious > 0.25).sum().item(),
+        "acc0.50": (ious > 0.50).sum().item(),
+        "acc0.75": (ious > 0.75).sum().item(),
+        "precision": precision.tolist(),
+        "recall": recall.tolist(),
+        "f1score": f1score.tolist()
+    }
+
+    return mdict
+
+
+def metrics_loop(conf_matrices, class_names):
+    results = []
+    for scene_id, res in conf_matrices.items():
+        conf_matrix = res["conf_matrix"]
+        keep_index = res["keep_index"]
+        conf_matrix = conf_matrix[keep_index, :][:, keep_index]
+        keep_class_names = [class_names[i] for i in keep_index]
+
+        mdict = compute_metrics(conf_matrix, keep_class_names)
+        results.append(
+            {
+                "scene_id": scene_id,
+                "miou": mdict["miou"] * 100.0,
+                "mrecall": np.mean(mdict["recall"]) * 100.0,
+                "mprecision": np.mean(mdict["precision"]) * 100.0,
+                "mf1score": np.mean(mdict["f1score"]) * 100.0,
+                "fmiou": mdict["fmiou"] * 100.0,
+            }
+        )
+
+    df_result = pd.DataFrame(results)
+
+    return df_result
\ No newline at end of file
diff --git a/adaptors/utils/utils.py b/adaptors/utils/utils.py
new file mode 100644
index 0000000..3c07de2
--- /dev/null
+++ b/adaptors/utils/utils.py
@@ -0,0 +1,50 @@
+import numpy as np
+import open3d as o3d
+import matplotlib.pyplot as plt
+
+from utils.visual import get_semseg_palette
+
+def debug_visualize_loaded_pointclouds(pred_class, class_names, pred_xyz, gt_xyz, gt_class, keep_index):
+    class2color = get_semseg_palette(len(class_names))
+
+    n = len(keep_index)
+    fig, ax = plt.subplots(figsize=(8, n // 5)) # Adjust the figure size based on number of colors
+
+    ax.set_xlim(0, 10)
+    ax.set_ylim(0, n)
+    ax.axis("off")
+
+    # Display each color with its name
+    for i, idx in enumerate(list(keep_index)):
+        ax.add_patch(plt.Rectangle((0, i), 9, 1, color=class2color[idx])) # Color swatch
+        ax.text(9.2, i + 0.5, class_names[idx], va='center', fontsize=10) # Color name
+
+    plt.show()
+
+    # predicted point cloud in open3d
+    print("Before resampling")
+    pred_pcd = o3d.geometry.PointCloud()
+    pred_pcd.points = o3d.utility.Vector3dVector(pred_xyz.numpy())
+    pred_pcd.colors = o3d.utility.Vector3dVector(class2color[pred_class.numpy()])
+    o3d.visualization.draw_geometries([pred_pcd])
+
+    print(np.unique(pred_class.numpy()))
+
+    # predicted point cloud in open3d
+    print("After resampling")
+    pred_pcd = o3d.geometry.PointCloud()
+    pred_pcd.points = o3d.utility.Vector3dVector(pred_xyz.numpy())
+    pred_pcd.colors = o3d.utility.Vector3dVector(class2color[pred_class.numpy()])
+    o3d.visualization.draw_geometries([pred_pcd])
+
+    # GT point cloud in open3d
+    print("GT pointcloud")
+    gt_pcd = o3d.geometry.PointCloud()
+    gt_pcd.points = o3d.utility.Vector3dVector(gt_xyz.numpy())
+    gt_pcd.colors = o3d.utility.Vector3dVector(class2color[gt_class.numpy()])
+    o3d.visualization.draw_geometries([gt_pcd])
+
+    print(np.unique(gt_class.numpy()))
+
+    print("Merged pointcloud")
+    o3d.visualization.draw_geometries([gt_pcd, pred_pcd])
\ No newline at end of file
diff --git a/adaptors/utils/visual.py b/adaptors/utils/visual.py
new file mode 100644
index 0000000..6294a45
--- /dev/null
+++ b/adaptors/utils/visual.py
@@ -0,0 +1,21 @@
+import random
+
+import numpy as np
+
+from matplotlib.colors import hex2color, rgb_to_hsv, CSS4_COLORS
+
+
+def get_semseg_palette(num_colors, seed=100):
+    semseg_colors = []
+    for hex_color in CSS4_COLORS.values():
+        rgb = hex2color(hex_color)
+        hsv = rgb_to_hsv(rgb)
+
+        if hsv[1] > 0.3 and hsv[2] > 0.3:
+            semseg_colors.append(rgb)
+
+    random.Random(seed).shuffle(semseg_colors)
+
+    semseg_colors += semseg_colors * int(np.ceil(num_colors / len(semseg_colors)) - 1)
+
+    return np.array(semseg_colors[:num_colors])
\ No newline at end of file

From d4742538cb677f1b2b3b20e72ac0ff8781ed4ce0 Mon Sep 17 00:00:00 2001
From: Asus ROG Be2R
Date: Mon, 23 Sep 2024 15:48:40 +0300
Subject: [PATCH 20/24] gitignore: pycache

---
 .gitignore | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.gitignore b/.gitignore
index 610f1df..1e092ee 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,4 +6,5 @@ concept-graphs/yolov8l-world.pt
 concept-graphs/outputs/*
 assets.zip
 .venv
-.ipynb_checkpoints
\ No newline at end of file
+.ipynb_checkpoints
+__pycache__
\ No newline at end of file
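For reference, compute_metrics() in adaptors/utils/metrics.py (PATCH 19 above) derives every reported number from the confusion matrix, with rows indexing ground-truth classes and columns indexing predictions. A minimal standalone sketch of the same arithmetic, on a made-up two-class matrix (the counts are illustrative only, not taken from any result in this series):

import numpy as np

# Toy confusion matrix: rows = ground truth, columns = prediction,
# the same convention as sklearn.metrics.confusion_matrix in utils/eval.py.
confmatrix = np.array([[8, 2],
                       [1, 9]])

tp = np.diag(confmatrix)          # per-class true positives:  [8 9]
fp = confmatrix.sum(axis=0) - tp  # per-class false positives: [1 2]
fn = confmatrix.sum(axis=1) - tp  # per-class false negatives: [2 1]

ious = tp / np.maximum(tp + fp + fn, 1e-7)           # [0.7273, 0.75]
miou = ious.mean()                                   # ~0.7386
fmiou = (ious * (tp + fn) / confmatrix.sum()).sum()  # frequency-weighted IoU

print(miou, fmiou)

Because both classes occur equally often in this toy matrix, miou and fmiou coincide; in general the frequency weighting makes fmiou favour the more common classes.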
From 9f0ee50a41fcfbb94cb7f698bbb5f5850f7f7f37 Mon Sep 17 00:00:00 2001
From: Asus ROG Be2R
Date: Mon, 23 Sep 2024 15:52:47 +0300
Subject: [PATCH 21/24] rename directory: adaptors -> scripts, export files and
 docker compose update

---
 .../__pycache__/__init__.cpython-310.pyc      | Bin 123 -> 0 bytes
 .../__pycache__/conceptgraph.cpython-310.pyc  | Bin 1956 -> 0 bytes
 .../utils/__pycache__/eval.cpython-310.pyc    | Bin 5566 -> 0 bytes
 .../utils/__pycache__/metrics.cpython-310.pyc | Bin 1976 -> 0 bytes
 .../utils/__pycache__/utils.cpython-310.pyc   | Bin 1396 -> 0 bytes
 .../utils/__pycache__/visual.cpython-310.pyc  | Bin 676 -> 0 bytes
 docker-compose.yaml                           |  2 +-
 .../export_concept_graphs_replica_cad_none.sh | 87 +++++++++---------
 {adaptors => scripts}/__init__.py             |  0
 {adaptors => scripts}/adaptors/__init__.py    |  0
 .../adaptors/conceptgraph.py                  |  0
 {adaptors => scripts}/eval_semseg.py          |  0
 {adaptors => scripts}/run_slam.py             |  0
 {adaptors => scripts}/utils/eval.py           |  0
 {adaptors => scripts}/utils/metrics.py        |  0
 {adaptors => scripts}/utils/utils.py          |  0
 {adaptors => scripts}/utils/visual.py         |  0
 17 files changed, 45 insertions(+), 44 deletions(-)
 delete mode 100644 adaptors/adaptors/__pycache__/__init__.cpython-310.pyc
 delete mode 100644 adaptors/adaptors/__pycache__/conceptgraph.cpython-310.pyc
 delete mode 100644 adaptors/utils/__pycache__/eval.cpython-310.pyc
 delete mode 100644 adaptors/utils/__pycache__/metrics.cpython-310.pyc
 delete mode 100644 adaptors/utils/__pycache__/utils.cpython-310.pyc
 delete mode 100644 adaptors/utils/__pycache__/visual.cpython-310.pyc
 rename {adaptors => scripts}/__init__.py (100%)
 rename {adaptors => scripts}/adaptors/__init__.py (100%)
 rename {adaptors => scripts}/adaptors/conceptgraph.py (100%)
 rename {adaptors => scripts}/eval_semseg.py (100%)
 rename {adaptors => scripts}/run_slam.py (100%)
 rename {adaptors => scripts}/utils/eval.py (100%)
 rename {adaptors => scripts}/utils/metrics.py (100%)
 rename {adaptors => scripts}/utils/utils.py (100%)
 rename {adaptors => scripts}/utils/visual.py (100%)

diff --git a/adaptors/adaptors/__pycache__/__init__.cpython-310.pyc b/adaptors/adaptors/__pycache__/__init__.cpython-310.pyc
deleted file mode 100644
index db16295578d617042cc6be26911f22f7be9c5a47..0000000000000000000000000000000000000000
Binary files a/adaptors/adaptors/__pycache__/__init__.cpython-310.pyc and /dev/null differ
diff --git a/adaptors/adaptors/__pycache__/conceptgraph.cpython-310.pyc b/adaptors/adaptors/__pycache__/conceptgraph.cpython-310.pyc
deleted file mode 100644
index 468641584a1dc2e23765de824fda76a378648351..0000000000000000000000000000000000000000
Binary files a/adaptors/adaptors/__pycache__/conceptgraph.cpython-310.pyc and /dev/null differ
diff --git a/adaptors/utils/__pycache__/eval.cpython-310.pyc b/adaptors/utils/__pycache__/eval.cpython-310.pyc
deleted file mode 100644
index 97fddc72c4ffdb35ea1dce8bae17a24c93b77dda..0000000000000000000000000000000000000000
Binary files a/adaptors/utils/__pycache__/eval.cpython-310.pyc and /dev/null differ
diff --git a/adaptors/utils/__pycache__/metrics.cpython-310.pyc b/adaptors/utils/__pycache__/metrics.cpython-310.pyc
deleted file mode 100644
Binary files a/adaptors/utils/__pycache__/metrics.cpython-310.pyc and /dev/null differ
diff --git a/adaptors/utils/__pycache__/utils.cpython-310.pyc b/adaptors/utils/__pycache__/utils.cpython-310.pyc
deleted file mode 100644
index a826fb0b37005f7aa409dc072a74deca3136d9a9..0000000000000000000000000000000000000000
Binary files a/adaptors/utils/__pycache__/utils.cpython-310.pyc and /dev/null differ
diff --git a/adaptors/utils/__pycache__/visual.cpython-310.pyc b/adaptors/utils/__pycache__/visual.cpython-310.pyc
deleted file mode 100644
index b850353f3b213df9c8b169f401d27dead3d36703..0000000000000000000000000000000000000000
Binary files a/adaptors/utils/__pycache__/visual.cpython-310.pyc and /dev/null differ
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 2aec19f..0677080 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -21,7 +21,7 @@ services:
       - $ASSETS_DIR:/assets
       - $DATA_DIR:/data/
       - $ROOT_DIR/export:/export/
-      - $ROOT_DIR/adaptors:/adaptors/
+      - $ROOT_DIR/scripts:/scripts/
     runtime: nvidia
     privileged: true
     command: bash
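With the volume renamed from /adaptors to /scripts, the helper entry points are reached under the new mount path inside the container. A minimal invocation sketch (the compose service name "cg" is an assumption for illustration; substitute the actual service defined in docker-compose.yaml):

# Hypothetical service name; the real one is defined in docker-compose.yaml.
docker compose run --rm cg python /scripts/eval_semseg.py --n_exclude 6 --label baseline

The export script below is updated to the same /scripts paths.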
diff --git a/export/export_concept_graphs_replica_cad_none.sh b/export/export_concept_graphs_replica_cad_none.sh
index 371e722..cef927f 100644
--- a/export/export_concept_graphs_replica_cad_none.sh
+++ b/export/export_concept_graphs_replica_cad_none.sh
@@ -17,25 +17,25 @@ export SEMANTIC_DATASET_CONFIG_PATH=${CG_FOLDER}/dataset/dataconfigs/replica/rep
 # export SCENE_NAMES=(office0 office1 office2 office3 office4 room0 room1) # room2
 export SCENE_NAMES=(
 v3_sc0_staging_00
-v3_sc0_staging_12
-v3_sc0_staging_16
-v3_sc0_staging_19
-v3_sc0_staging_20
-v3_sc1_staging_00
-v3_sc1_staging_06
-v3_sc1_staging_12
-v3_sc1_staging_19
-v3_sc1_staging_20
-v3_sc2_staging_00
-v3_sc2_staging_11
-v3_sc2_staging_13
-v3_sc2_staging_19
-v3_sc2_staging_20
-v3_sc3_staging_03
-v3_sc3_staging_04
-v3_sc3_staging_08
-v3_sc3_staging_15
-v3_sc3_staging_20
+# v3_sc0_staging_12
+# v3_sc0_staging_16
+# v3_sc0_staging_19
+# v3_sc0_staging_20
+# v3_sc1_staging_00
+# v3_sc1_staging_06
+# v3_sc1_staging_12
+# v3_sc1_staging_19
+# v3_sc1_staging_20
+# v3_sc2_staging_00
+# v3_sc2_staging_11
+# v3_sc2_staging_13
+# v3_sc2_staging_19
+# v3_sc2_staging_20
+# v3_sc3_staging_03
+# v3_sc3_staging_04
+# v3_sc3_staging_08
+# v3_sc3_staging_15
+# v3_sc3_staging_20
 )
 # v3_sc0_staging_00
 # v3_sc0_staging_12
@@ -58,7 +58,7 @@ v3_sc3_staging_20
 # v3_sc3_staging_15
 # v3_sc3_staging_20
 # )
-export SCENE_LABELS=(baseline camera_lights dynamic_lights no_lights velocity) # baseline camera_light dynamic_lights FOV no_lights resolution
+export SCENE_LABELS=(dynamic_lights) # baseline camera_light dynamic_lights FOV no_lights resolution
 # export SCENE_NAME=room1
 export CLASS_SET=none
 export THRESHOLD=1.2
@@ -143,12 +143,12 @@ do
     #     --image_width 640 \
    #     --stride 5
 
-    python /adaptors/run_slam.py \
-        --dataset_root "${DATASET_ROOT}" \
-        --dataset_config "/tmp/config/data_config.yaml" \
-        --scene_id "${SCENE_LABEL}/${SCENE_NAME}" \
-        --stride 5 \
-        --downsample_rate 10
+    # python /scripts/run_slam.py \
+    #     --dataset_root "${DATASET_ROOT}" \
+    #     --dataset_config "/tmp/config/data_config.yaml" \
+    #     --scene_id "${SCENE_LABEL}/${SCENE_NAME}" \
+    #     --stride 5 \
+    #     --downsample_rate 10
 
 done
 
@@ -170,13 +170,13 @@ do
     #     --stride 10 \
    #     --load_semseg
 
-    python /adaptors/run_slam.py \
-        --dataset_root "${DATASET_ROOT}/baseline/" \
-        --dataset_config "/tmp/config/data_semantic_config.yaml" \
-        --scene_id ${SCENE_NAME} \
-        --stride 5 \
-        --downsample_rate 10 \
-        --load_semseg
+    # python /scripts/run_slam.py \
+    #     --dataset_root "${DATASET_ROOT}/baseline/" \
+    #     --dataset_config "/tmp/config/data_semantic_config.yaml" \
+    #     --scene_id ${SCENE_NAME} \
+    #     --stride 5 \
+    #     --downsample_rate 10 \
+    #     --load_semseg
 
 done
 
@@ -186,7 +186,7 @@ do
 
     ######
     # Then run the following commands to evaluate the semantic segmentation results.
    ######
-    export SCENE_NAMES_STR=$(IFS=' '; echo "${SCENE_NAME[*]}")
+    export SCENE_NAMES_STR=$(IFS=' '; echo "${SCENE_NAMES[*]}")
 
     # echo $SCENE_NAMES_STR
 
@@ -212,14 +212,15 @@ do
     #     # --device "cpu"
 
-    # python /adaptors/eval_semseg.py \
-    #     --replica_root "${DATASET_ROOT}/${SCENE_LABEL}" \
-    #     --replica_semantic_root "${DATASET_ROOT}/baseline" \
-    #     --n_exclude 6 \
-    #     --label "${SCENE_LABEL}" \
-    #     --scene_ids_str "${SCENE_NAMES_STR}" \
-    #     --pred_exp_name "none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub" # \
-    #     # --semseg_classes "${DATASET_ROOT}/baseline/embed_semseg_classes.json" \
-    #     # --device "cpu"
+    python /scripts/eval_semseg.py \
+        --replica_root "${DATASET_ROOT}/${SCENE_LABEL}" \
+        --replica_semantic_root "${DATASET_ROOT}/baseline" \
+        --n_exclude 6 \
+        --label "${SCENE_LABEL}" \
+        --scene_ids_str "${SCENE_NAMES_STR}" \
+        --pred_exp_name "none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub" \
+        --device "cpu"
+        # --semseg_classes "${DATASET_ROOT}/baseline/embed_semseg_classes.json" \
+        # --device "cpu"
 
 done
\ No newline at end of file
diff --git a/adaptors/__init__.py b/scripts/__init__.py
similarity index 100%
rename from adaptors/__init__.py
rename to scripts/__init__.py
diff --git a/adaptors/adaptors/__init__.py b/scripts/adaptors/__init__.py
similarity index 100%
rename from adaptors/adaptors/__init__.py
rename to scripts/adaptors/__init__.py
diff --git a/adaptors/adaptors/conceptgraph.py b/scripts/adaptors/conceptgraph.py
similarity index 100%
rename from adaptors/adaptors/conceptgraph.py
rename to scripts/adaptors/conceptgraph.py
diff --git a/adaptors/eval_semseg.py b/scripts/eval_semseg.py
similarity index 100%
rename from adaptors/eval_semseg.py
rename to scripts/eval_semseg.py
diff --git a/adaptors/run_slam.py b/scripts/run_slam.py
similarity index 100%
rename from adaptors/run_slam.py
rename to scripts/run_slam.py
diff --git a/adaptors/utils/eval.py b/scripts/utils/eval.py
similarity index 100%
rename from adaptors/utils/eval.py
rename to scripts/utils/eval.py
diff --git a/adaptors/utils/metrics.py b/scripts/utils/metrics.py
similarity index 100%
rename from adaptors/utils/metrics.py
rename to scripts/utils/metrics.py
diff --git a/adaptors/utils/utils.py b/scripts/utils/utils.py
similarity index 100%
rename from adaptors/utils/utils.py
rename to scripts/utils/utils.py
diff --git a/adaptors/utils/visual.py b/scripts/utils/visual.py
similarity index 100%
rename from adaptors/utils/visual.py
rename to scripts/utils/visual.py

From 8636e1924098ae66cd7e8c7c556549c9aa19f0c1 Mon Sep 17 00:00:00 2001
From: Asus ROG Be2R
Date: Mon, 23 Sep 2024 17:22:36 +0300
Subject: [PATCH 22/24] adding results directory

---
 docker-compose.yaml                              |  1 +
 export/export_concept_graphs_replica_cad_none.sh |  2 +-
 scripts/eval_semseg.py                           | 16 ++++++++++++----
 3 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/docker-compose.yaml b/docker-compose.yaml
index 0677080..ad57810 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -22,6 +22,7 @@ services:
       - $DATA_DIR:/data/
       - $ROOT_DIR/export:/export/
       - $ROOT_DIR/scripts:/scripts/
+      - $ROOT_DIR/results:/results/
     runtime: nvidia
     privileged: true
     command: bash
diff --git a/export/export_concept_graphs_replica_cad_none.sh b/export/export_concept_graphs_replica_cad_none.sh
index cef927f..d56d8a2 100644
--- a/export/export_concept_graphs_replica_cad_none.sh
+++ b/export/export_concept_graphs_replica_cad_none.sh
@@ -219,7 +219,7 @@ do
         --label "${SCENE_LABEL}" \
         --scene_ids_str "${SCENE_NAMES_STR}" \
--pred_exp_name "none_overlap_maskconf0.95_simsum${THRESHOLD}_dbscan.1_merge20_masksub" \ - --device "cpu" + --results_path "/results/conceptgraphs/" # --semseg_classes "${DATASET_ROOT}/baseline/embed_semseg_classes.json" \ # --device "cpu" diff --git a/scripts/eval_semseg.py b/scripts/eval_semseg.py index 4a32a85..5c2caf4 100644 --- a/scripts/eval_semseg.py +++ b/scripts/eval_semseg.py @@ -36,6 +36,11 @@ def get_parser(): type=str, default="replica" ) + parser.add_argument( + "--results_path", + type=Path, + default=Path("/results/").expanduser() + ) parser.add_argument( "--n_exclude", type=int, default=1, choices=[1, 4, 6], help='''Number of classes to exclude: @@ -114,15 +119,18 @@ def compute_clip_embeddings(args, class_names): def save_results(args, conf_matrices, df_result): - save_path = "./results/%s/%s_ex%d_results.csv" % ( - args.pred_exp_name, args.label, args.n_exclude + save_path = os.path.join( + args.results_path, + f"{args.pred_exp_name}/{args.label}_ex{args.n_exclude}_results.csv" ) + os.makedirs(os.path.dirname(save_path), exist_ok=True) df_result.to_csv(save_path, index=False) # Also save the conf_matrices - save_path = "./results/%s/%s_ex%d_conf_matrices.pkl" % ( - args.pred_exp_name, args.label, args.n_exclude + save_path = os.path.join( + args.results_path, + f"{args.pred_exp_name}/{args.label}_ex{args.n_exclude}_conf_matrices.pkl" ) pickle.dump(conf_matrices, open(save_path, "wb")) From dd7cf1d6365d5d831778dc92af03507fe6b1578a Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Mon, 23 Sep 2024 21:12:43 +0300 Subject: [PATCH 23/24] useful notebooks --- notebooks/confmat_analysis.ipynb | 102 ++++++++++++++ notebooks/miou_graphs.ipynb | 177 +++++++++++++++++++++++++ notebooks/segmentation_gt_visual.ipynb | 159 ++++++++++++++++++++++ 3 files changed, 438 insertions(+) create mode 100644 notebooks/confmat_analysis.ipynb create mode 100644 notebooks/miou_graphs.ipynb create mode 100644 notebooks/segmentation_gt_visual.ipynb diff --git a/notebooks/confmat_analysis.ipynb b/notebooks/confmat_analysis.ipynb new file mode 100644 index 0000000..31efc94 --- /dev/null +++ b/notebooks/confmat_analysis.ipynb @@ -0,0 +1,102 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import pickle\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from sklearn.metrics import ConfusionMatrixDisplay\n", + "\n", + "%matplotlib inline" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "file_path = \"../concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_conf_matrices.pkl\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "file = open(file_path,'rb')\n", + "matrices = pickle.load(file)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "matrices" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "indx = matrices['v3_sc0_staging_00']['keep_index']\n", + "disp = ConfusionMatrixDisplay(np.log10(matrices['v3_sc0_staging_00']['conf_matrix'][indx][:, indx].numpy()), display_labels=indx)\n", + "\n", + "disp.plot()\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "indx = matrices['all']['keep_index'].numpy()\n", + "disp = 
ConfusionMatrixDisplay(np.log10(matrices['all']['conf_matrix'][indx][:, indx].numpy()), display_labels=indx)\n", + "\n", + "disp.plot()\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "confmatrix = matrices['v3_sc0_staging_00']['conf_matrix'][indx][:, indx].numpy()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/miou_graphs.ipynb b/notebooks/miou_graphs.ipynb new file mode 100644 index 0000000..969ee9f --- /dev/null +++ b/notebooks/miou_graphs.ipynb @@ -0,0 +1,177 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "import matplotlib.pyplot as plt\n", + "import seaborn as sns\n", + "import numpy as np\n", + "import os" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "os_csv_files = {\n", + " \"Baseline\": \"baseline.csv\",\n", + " \"Velocity\": \"velocity.csv\",\n", + " \"Camera Lights\": \"camera_lights.csv\",\n", + " \"Dynamic Lights\": \"dynamic_lights.csv\",\n", + " \"No Lights\": \"no_lights.csv\"\n", + "}\n", + "\n", + "cg_csv_files = {\n", + " \"Baseline\": \"baseline_ex6_results.csv\",\n", + " \"Velocity\": \"velocity_ex6_results.csv\",\n", + " \"Camera Lights\": \"camera_lights_ex6_results.csv\",\n", + " \"Dynamic Lights\": \"dynamic_lights_ex6_results.csv\",\n", + " \"No Lights\": \"no_lights_ex6_results.csv\"\n", + "}\n", + "\n", + "metric = 'miou'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def get_scene_data(csv_files, metric, results_path):\n", + " scene_data = {}\n", + "\n", + " for config, filename in csv_files.items():\n", + " # Read the current CSV file\n", + " df = pd.read_csv(os.path.join(results_path, filename))\n", + "\n", + " df_scenes = df[df['scene_id'] != 'all']\n", + "\n", + " for index, row in df_scenes.iterrows():\n", + " scene_id = row['scene_id']\n", + "\n", + " if scene_id not in scene_data:\n", + " scene_data[scene_id] = []\n", + "\n", + " scene_data[scene_id].append(row[metric])\n", + "\n", + " all_metric_value = df_scenes[metric].mean()\n", + "\n", + " if 'all' not in scene_data:\n", + " scene_data['all'] = []\n", + "\n", + " scene_data['all'].append(all_metric_value)\n", + " \n", + " return scene_data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "os_results_path = ''\n", + "cg_results_path = '../results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/'\n", + "\n", + "os_scene_data = get_scene_data(os_csv_files, metric, os_results_path)\n", + "cg_scene_data = get_scene_data(cg_csv_files, metric, cg_results_path)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "output_dir_miou = \"miou_graphs\"\n", + "os.makedirs(output_dir_miou, exist_ok=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def 
draw_plots(cg_scene_data, cg_csv_files, os_scene_data, os_csv_files, metric, save=True):\n", + " for (cg_scene_id, cg_values), (os_scene_id, os_values) in zip(cg_scene_data.items(), os_scene_data.items()):\n", + " data = {\n", + " 'Scene Configuration': list(cg_csv_files.keys()) + list(os_csv_files.keys()),\n", + " 'Metric Value': list(np.array(cg_values) / 100) + os_values,\n", + " 'Testing Approach': ['Concept Graphs'] * len(cg_values) + ['Open Scene'] * len(os_values)\n", + " }\n", + " \n", + " assert cg_scene_id == os_scene_id\n", + " \n", + " df = pd.DataFrame(data)\n", + "\n", + " # Create the bar chart using seaborn\n", + " plt.figure(figsize=(10, 6))\n", + " # plt.title(f'{metric.upper()} Comparison for {os_scene_id} (CG)', fontsize=16)\n", + " sns.barplot(x='Scene Configuration', y='Metric Value', hue='Testing Approach', data=df, palette=['skyblue', 'salmon'])\n", + "\n", + " # Adding titles and labels\n", + " # plt.title('Metric Dependence on Scene Configurations by Testing Approach', fontsize=16)\n", + " plt.xlabel('Scene Configurations')\n", + " plt.ylabel(metric)\n", + "\n", + " # Show the legend\n", + " plt.legend(title='Approach')\n", + "\n", + " if save:\n", + " plt.savefig(os.path.join(output_dir_miou, f'{cg_scene_id}_{metric}_comparison.png'))\n", + "\n", + " plt.close()\n", + " else:\n", + " plt.tight_layout()\n", + " plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "sns.set_context(\"notebook\", font_scale=1.2)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "draw_plots(cg_scene_data, cg_csv_files, os_scene_data, os_csv_files, metric='mIoU')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/segmentation_gt_visual.ipynb b/notebooks/segmentation_gt_visual.ipynb new file mode 100644 index 0000000..0e3d6bd --- /dev/null +++ b/notebooks/segmentation_gt_visual.ipynb @@ -0,0 +1,159 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "scene_path = \"../../data/Datasets/generated/replica_cad/baseline/v3_sc3_staging_20/\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "import os\n", + "import sys\n", + "\n", + "import imageio\n", + "import numpy as np\n", + "import matplotlib.patches as mpatches\n", + "import matplotlib.pyplot as plt\n", + "\n", + "\n", + "sys.path.append('./')\n", + "\n", + "from scripts.utils.visual import get_semseg_palette" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "palette = get_semseg_palette(100, seed=2001)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "test_img = np.arange(100).reshape((10, 10))\n", + "test_img" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "scale = 30\n", + "test_img = np.repeat(np.repeat(test_img, scale, axis=1), scale, axis=0)\n", + 
"plt.imshow(palette[test_img])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Open the JSON file\n", + "with open(os.path.join(scene_path, 'embed_semseg_classes.json'), 'r') as file:\n", + " semseg_classes = json.load(file)['classes']\n", + " \n", + " id_to_name_dict = {pair['id']: pair['name'] for pair in semseg_classes}\n", + "\n", + "print(id_to_name_dict)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "image_array = imageio.imread(os.path.join(scene_path, 'results/semantic001028.png')).astype(np.int16)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "exists_classes = list(np.unique(image_array))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "unique_values, inverse_indices = np.unique(image_array, return_inverse=True)\n", + "id_matrix = inverse_indices.reshape(image_array.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "exists_classes" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "color_image = palette[image_array]\n", + "\n", + "plt.imshow(color_image)\n", + "patches = [mpatches.Patch(color=palette[idx], label=id_to_name_dict[idx]) for idx in exists_classes]\n", + "plt.legend(handles=patches, bbox_to_anchor=(1.05, 1), loc='upper left')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# imageio.imwrite('output_image.png', color_image)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 3f1596952ad3bbe7de6ce3ded7b1660b1902957e Mon Sep 17 00:00:00 2001 From: Asus ROG Be2R Date: Mon, 23 Sep 2024 21:19:08 +0300 Subject: [PATCH 24/24] moving existing results into new directory --- .../baseline_ex6_conf_matrices.pkl | Bin .../baseline_ex6_results.csv | 0 .../camera_lights_ex6_conf_matrices.pkl | Bin .../camera_lights_ex6_results.csv | 0 .../dynamic_lights_ex6_conf_matrices.pkl | Bin .../dynamic_lights_ex6_results.csv | 0 .../no_lights_ex6_conf_matrices.pkl | Bin .../no_lights_ex6_results.csv | 0 .../velocity_ex6_conf_matrices.pkl | Bin .../velocity_ex6_results.csv | 0 10 files changed, 0 insertions(+), 0 deletions(-) rename {concept-graphs/results => results/conceptgraphs}/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_conf_matrices.pkl (100%) mode change 100755 => 100644 rename {concept-graphs/results => results/conceptgraphs}/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_results.csv (100%) mode change 100755 => 100644 rename {concept-graphs/results => results/conceptgraphs}/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_conf_matrices.pkl (100%) mode change 100755 => 100644 rename {concept-graphs/results => results/conceptgraphs}/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_results.csv (100%) mode change 100755 => 100644 
rename {concept-graphs/results => results/conceptgraphs}/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_conf_matrices.pkl (100%) rename {concept-graphs/results => results/conceptgraphs}/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_results.csv (100%) rename {concept-graphs/results => results/conceptgraphs}/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_conf_matrices.pkl (100%) rename {concept-graphs/results => results/conceptgraphs}/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_results.csv (100%) rename {concept-graphs/results => results/conceptgraphs}/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_conf_matrices.pkl (100%) rename {concept-graphs/results => results/conceptgraphs}/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_results.csv (100%) diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_conf_matrices.pkl b/results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_conf_matrices.pkl old mode 100755 new mode 100644 similarity index 100% rename from concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_conf_matrices.pkl rename to results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_conf_matrices.pkl diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_results.csv b/results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_results.csv old mode 100755 new mode 100644 similarity index 100% rename from concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_results.csv rename to results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/baseline_ex6_results.csv diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_conf_matrices.pkl b/results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_conf_matrices.pkl old mode 100755 new mode 100644 similarity index 100% rename from concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_conf_matrices.pkl rename to results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_conf_matrices.pkl diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_results.csv b/results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_results.csv old mode 100755 new mode 100644 similarity index 100% rename from concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_results.csv rename to results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/camera_lights_ex6_results.csv diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_conf_matrices.pkl b/results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_conf_matrices.pkl similarity index 100% rename from concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_conf_matrices.pkl rename to 
results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_conf_matrices.pkl diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_results.csv b/results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_results.csv similarity index 100% rename from concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_results.csv rename to results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/dynamic_lights_ex6_results.csv diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_conf_matrices.pkl b/results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_conf_matrices.pkl similarity index 100% rename from concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_conf_matrices.pkl rename to results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_conf_matrices.pkl diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_results.csv b/results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_results.csv similarity index 100% rename from concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_results.csv rename to results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/no_lights_ex6_results.csv diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_conf_matrices.pkl b/results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_conf_matrices.pkl similarity index 100% rename from concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_conf_matrices.pkl rename to results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_conf_matrices.pkl diff --git a/concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_results.csv b/results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_results.csv similarity index 100% rename from concept-graphs/results/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_results.csv rename to results/conceptgraphs/none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/velocity_ex6_results.csv
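For reference, here is a minimal sketch of how the relocated *_conf_matrices.pkl files relate to the miou column plotted in miou_graphs.ipynb. It assumes the pickle layout used in the notebooks above (a dict keyed by scene id, e.g. matrices['all']['conf_matrix'], holding a square torch tensor of pixel counts) and uses the standard per-class IoU formula, diag / (row sum + column sum - diag); that formula is symmetric under transposition, so it holds whichever axis is ground truth. It is a sketch, not the evaluation code that produced the CSVs.

import pickle

import numpy as np

# Path taken verbatim from the rename list above.
path = ('results/conceptgraphs/'
        'none_overlap_maskconf0.95_simsum1.2_dbscan.1_merge20_masksub/'
        'baseline_ex6_conf_matrices.pkl')

with open(path, 'rb') as f:
    matrices = pickle.load(f)  # unpickling the stored tensors requires torch to be installed

conf = matrices['all']['conf_matrix'].numpy().astype(np.float64)

tp = np.diag(conf)                                # per-class intersection (true positives)
union = conf.sum(axis=0) + conf.sum(axis=1) - tp  # per-class union (TP + FP + FN)
present = union > 0                               # skip classes absent from both GT and predictions
iou = tp[present] / union[present]

print(f'mIoU over {present.sum()} present classes: {iou.mean():.4f}')

Note that miou_graphs.ipynb divides the Concept Graphs values by 100 before plotting, i.e. the CSVs appear to store this metric as a percentage.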