feat: Export directory prefix #16

Merged: 11 commits, Aug 6, 2024
17 changes: 6 additions & 11 deletions drishti/handlers/handle_darshan.py
@@ -752,15 +752,10 @@ def handler():
     display_thresholds(console)
     display_footer(console, insights_start_time, insights_end_time)

-    filename = '{}.html'.format(args.log_path)
-    export_html(console, filename)
-
-    filename = '{}.svg'.format(args.log_path)
-    export_svg(console, filename)
-
-    filename = '{}-summary.csv'.format(
-        args.log_path.replace('.darshan', '')
-    )
-
-    export_csv(filename, job['job']['jobid'])
+    # Export to HTML, SVG, and CSV
+    trace_name = os.path.basename(args.log_path).replace('.darshan', '')
+    out_dir = args.export_dir if args.export_dir != "" else os.getcwd()
+
+    export_html(console, out_dir, trace_name)
+    export_svg(console, out_dir, trace_name)
+    export_csv(out_dir, trace_name, job['job']['jobid'])
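For reference, a minimal sketch (outside the patch itself) of how the Darshan handler now resolves the output location; the log path and flag value below are made-up examples, not values from this PR:

import os

# Hypothetical inputs, for illustration only.
log_path = "/scratch/user/app_run.darshan"    # stand-in for args.log_path
export_dir = ""                               # stand-in for args.export_dir (flag omitted)

trace_name = os.path.basename(log_path).replace('.darshan', '')   # -> "app_run"
out_dir = export_dir if export_dir != "" else os.getcwd()          # falls back to the current directory

# Reports would land at <out_dir>/app_run.html, app_run.svg, and app_run.csv
print(os.path.join(out_dir, f"{trace_name}.html"))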
27 changes: 9 additions & 18 deletions drishti/handlers/handle_recorder.py
@@ -2,9 +2,11 @@

 import os
 import time
+
 import pandas as pd
 from recorder_utils import RecorderReader
 from recorder_utils.build_offset_intervals import build_offset_intervals
+
 from drishti.includes.module import *


@@ -577,23 +579,12 @@ def process_helper(file_map, df_intervals, df_posix_records, fid=None):
     display_thresholds(console)
     display_footer(console, insights_start_time, insights_end_time)

-    if args.split_files:
-        filename = '{}.{}.html'.format(args.log_path, fid)
-    else:
-        filename = '{}.html'.format(args.log_path)
-
-    export_html(console, filename)
-
-    if args.split_files:
-        filename = '{}.{}.svg'.format(args.log_path, fid)
-    else:
-        filename = '{}.svg'.format(args.log_path)
-
-    export_svg(console, filename)
-
-    if args.split_files:
-        filename = '{}.{}.summary.csv'.format(args.log_path, fid)
-    else:
-        filename = '{}-summary.csv'.format(args.log_path)
-    export_csv(filename)
+    # Export to HTML, SVG, and CSV
+    trace_name = os.path.basename(os.path.dirname(args.log_path))
+    if args.split_files:
+        trace_name = f"{trace_name}.{fid}"
+    out_dir = args.export_dir if args.export_dir != "" else os.getcwd()
+
+    export_html(console, out_dir, trace_name)
+    export_svg(console, out_dir, trace_name)
+    export_csv(out_dir, trace_name)
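A similar sketch for the Recorder handler, showing how --split_files changes the trace name; the trace directory and file id are invented for the example:

import os

log_path = "/scratch/user/recorder-logs/app_run/"   # stand-in for args.log_path (a trace directory)
fid = 3                                             # stand-in for the per-file id used with --split_files
split_files = True

trace_name = os.path.basename(os.path.dirname(log_path))   # -> "app_run"
if split_files:
    trace_name = f"{trace_name}.{fid}"                      # -> "app_run.3"

# With --export_dir reports/run-42, the outputs would be reports/run-42/app_run.3.{html,svg,csv}
print(trace_name)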
142 changes: 78 additions & 64 deletions drishti/includes/module.py
@@ -1823,76 +1823,90 @@ def display_footer(console, insights_start_time, insights_end_time):
)
)

-def export_html(console, filename):
-    '''
-    '''
-
-    if args.export_html:
-        console.save_html(
-            filename,
-            theme=set_export_theme(),
-            clear=False
-        )
+def export_html(console, export_dir, trace_name):
+    if not args.export_html:
+        return
+
+    os.makedirs(export_dir, exist_ok=True)  # Ensure export directory exists
+    filepath = os.path.join(export_dir, f"{trace_name}.html")
+
+    console.save_html(
+        filepath,
+        theme=set_export_theme(),
+        clear=False
+    )


-def export_svg(console, filename):
-    if args.export_svg:
-        console.save_svg(
-            filename,
-            title='Drishti',
-            theme=set_export_theme(),
-            clear=False
-        )
+def export_svg(console, export_dir, trace_name):
+    if not args.export_svg:
+        return
+
+    os.makedirs(export_dir, exist_ok=True)  # Ensure export directory exists
+    filepath = os.path.join(export_dir, f"{trace_name}.svg")
+
+    console.save_svg(
+        filepath,
+        title='Drishti',
+        theme=set_export_theme(),
+        clear=False
+    )


-def export_csv(filename, jobid=None):
-    if args.export_csv:
-        issues = [
-            'JOB',
-            INSIGHTS_STDIO_HIGH_USAGE,
-            INSIGHTS_POSIX_WRITE_COUNT_INTENSIVE,
-            INSIGHTS_POSIX_READ_COUNT_INTENSIVE,
-            INSIGHTS_POSIX_WRITE_SIZE_INTENSIVE,
-            INSIGHTS_POSIX_READ_SIZE_INTENSIVE,
-            INSIGHTS_POSIX_HIGH_SMALL_READ_REQUESTS_USAGE,
-            INSIGHTS_POSIX_HIGH_SMALL_WRITE_REQUESTS_USAGE,
-            INSIGHTS_POSIX_HIGH_MISALIGNED_MEMORY_USAGE,
-            INSIGHTS_POSIX_HIGH_MISALIGNED_FILE_USAGE,
-            INSIGHTS_POSIX_REDUNDANT_READ_USAGE,
-            INSIGHTS_POSIX_REDUNDANT_WRITE_USAGE,
-            INSIGHTS_POSIX_HIGH_RANDOM_READ_USAGE,
-            INSIGHTS_POSIX_HIGH_SEQUENTIAL_READ_USAGE,
-            INSIGHTS_POSIX_HIGH_RANDOM_WRITE_USAGE,
-            INSIGHTS_POSIX_HIGH_SEQUENTIAL_WRITE_USAGE,
-            INSIGHTS_POSIX_HIGH_SMALL_READ_REQUESTS_SHARED_FILE_USAGE,
-            INSIGHTS_POSIX_HIGH_SMALL_WRITE_REQUESTS_SHARED_FILE_USAGE,
-            INSIGHTS_POSIX_HIGH_METADATA_TIME,
-            INSIGHTS_POSIX_SIZE_IMBALANCE,
-            INSIGHTS_POSIX_TIME_IMBALANCE,
-            INSIGHTS_POSIX_INDIVIDUAL_WRITE_SIZE_IMBALANCE,
-            INSIGHTS_POSIX_INDIVIDUAL_READ_SIZE_IMBALANCE,
-            INSIGHTS_MPI_IO_NO_USAGE,
-            INSIGHTS_MPI_IO_NO_COLLECTIVE_READ_USAGE,
-            INSIGHTS_MPI_IO_NO_COLLECTIVE_WRITE_USAGE,
-            INSIGHTS_MPI_IO_COLLECTIVE_READ_USAGE,
-            INSIGHTS_MPI_IO_COLLECTIVE_WRITE_USAGE,
-            INSIGHTS_MPI_IO_BLOCKING_READ_USAGE,
-            INSIGHTS_MPI_IO_BLOCKING_WRITE_USAGE,
-            INSIGHTS_MPI_IO_AGGREGATORS_INTRA,
-            INSIGHTS_MPI_IO_AGGREGATORS_INTER,
-            INSIGHTS_MPI_IO_AGGREGATORS_OK
-        ]
-        if codes:
-            issues.extend(codes)
-
-        detected_issues = dict.fromkeys(issues, False)
-        detected_issues['JOB'] = jobid
-
-        for report in csv_report:
-            detected_issues[report] = True
-
-        with open(filename, 'w') as f:
-            w = csv.writer(f)
-            w.writerow(detected_issues.keys())
-            w.writerow(detected_issues.values())
+def export_csv(export_dir, trace_name, jobid=None):
+    if not args.export_csv:
+        return
+
+    issues = [
+        'JOB',
+        INSIGHTS_STDIO_HIGH_USAGE,
+        INSIGHTS_POSIX_WRITE_COUNT_INTENSIVE,
+        INSIGHTS_POSIX_READ_COUNT_INTENSIVE,
+        INSIGHTS_POSIX_WRITE_SIZE_INTENSIVE,
+        INSIGHTS_POSIX_READ_SIZE_INTENSIVE,
+        INSIGHTS_POSIX_HIGH_SMALL_READ_REQUESTS_USAGE,
+        INSIGHTS_POSIX_HIGH_SMALL_WRITE_REQUESTS_USAGE,
+        INSIGHTS_POSIX_HIGH_MISALIGNED_MEMORY_USAGE,
+        INSIGHTS_POSIX_HIGH_MISALIGNED_FILE_USAGE,
+        INSIGHTS_POSIX_REDUNDANT_READ_USAGE,
+        INSIGHTS_POSIX_REDUNDANT_WRITE_USAGE,
+        INSIGHTS_POSIX_HIGH_RANDOM_READ_USAGE,
+        INSIGHTS_POSIX_HIGH_SEQUENTIAL_READ_USAGE,
+        INSIGHTS_POSIX_HIGH_RANDOM_WRITE_USAGE,
+        INSIGHTS_POSIX_HIGH_SEQUENTIAL_WRITE_USAGE,
+        INSIGHTS_POSIX_HIGH_SMALL_READ_REQUESTS_SHARED_FILE_USAGE,
+        INSIGHTS_POSIX_HIGH_SMALL_WRITE_REQUESTS_SHARED_FILE_USAGE,
+        INSIGHTS_POSIX_HIGH_METADATA_TIME,
+        INSIGHTS_POSIX_SIZE_IMBALANCE,
+        INSIGHTS_POSIX_TIME_IMBALANCE,
+        INSIGHTS_POSIX_INDIVIDUAL_WRITE_SIZE_IMBALANCE,
+        INSIGHTS_POSIX_INDIVIDUAL_READ_SIZE_IMBALANCE,
+        INSIGHTS_MPI_IO_NO_USAGE,
+        INSIGHTS_MPI_IO_NO_COLLECTIVE_READ_USAGE,
+        INSIGHTS_MPI_IO_NO_COLLECTIVE_WRITE_USAGE,
+        INSIGHTS_MPI_IO_COLLECTIVE_READ_USAGE,
+        INSIGHTS_MPI_IO_COLLECTIVE_WRITE_USAGE,
+        INSIGHTS_MPI_IO_BLOCKING_READ_USAGE,
+        INSIGHTS_MPI_IO_BLOCKING_WRITE_USAGE,
+        INSIGHTS_MPI_IO_AGGREGATORS_INTRA,
+        INSIGHTS_MPI_IO_AGGREGATORS_INTER,
+        INSIGHTS_MPI_IO_AGGREGATORS_OK
+    ]
+    if codes:
+        issues.extend(codes)
+
+    detected_issues = dict.fromkeys(issues, False)
+    detected_issues['JOB'] = jobid
+
+    for report in csv_report:
+        detected_issues[report] = True
+
+    os.makedirs(export_dir, exist_ok=True)  # Ensure export directory exists
+    filepath = os.path.join(export_dir, f"{trace_name}.csv")
+
+    with open(filepath, 'w') as f:
+        w = csv.writer(f)
+        w.writerow(detected_issues.keys())
+        w.writerow(detected_issues.values())
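The three exporters now share the same ensure-directory-then-join pattern; a small standalone sketch of that pattern follows (the helper name and paths are illustrative, not part of the PR):

import os

def build_export_path(export_dir, trace_name, extension):
    """Hypothetical helper mirroring what export_html/export_svg/export_csv each do inline."""
    os.makedirs(export_dir, exist_ok=True)               # create the prefix directory on demand
    return os.path.join(export_dir, f"{trace_name}.{extension}")

print(build_export_path("reports/2024-08", "app_run", "csv"))  # -> reports/2024-08/app_run.csv

If the duplication grows, factoring the makedirs/join pair into a single helper like this could be a follow-up.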

7 changes: 7 additions & 0 deletions drishti/includes/parser.py
@@ -96,6 +96,13 @@
     help='Export a CSV with the code of all issues that were triggered'
 )

+parser.add_argument(
+    '--export_dir',
+    default="",
+    dest='export_dir',
+    help='Specify the directory prefix for the output files (if any)'
+)
+
 parser.add_argument(
     '--json',
     default=False,
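A quick way to see the new flag's behavior in isolation, using a stripped-down parser that assumes only this option (Drishti's real parser defines many more):

import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument(
    '--export_dir',
    default="",
    dest='export_dir',
    help='Specify the directory prefix for the output files (if any)'
)

args = parser.parse_args(['--export_dir', 'reports/run-42'])   # example value, not from the PR
out_dir = args.export_dir if args.export_dir != "" else os.getcwd()
print(out_dir)   # reports/run-42; with the flag omitted, this falls back to the current directory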
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,5 +1,5 @@
 argparse
-darshan==3.4.4.0
+darshan>=3.4.4.0
 pandas
 rich==12.5.1
 recorder-utils
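For context, relaxing the pin means any pydarshan release at or above 3.4.4.0 satisfies the requirement; a quick check with the packaging library (version numbers are illustrative):

from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=3.4.4.0")
print("3.4.4.0" in spec)   # True: the previously pinned version still qualifies
print("3.4.6.0" in spec)   # True: newer releases are now accepted too
print("3.4.3.0" in spec)   # False: older releases are rejected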
4 changes: 2 additions & 2 deletions setup.py
@@ -1,4 +1,4 @@
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup

 with open("README.md", "r") as f:
     long_description = f.read()
@@ -19,7 +19,7 @@
     install_requires=[
         'argparse',
         'pandas',
-        'darshan==3.4.4.0',
+        'darshan>=3.4.4.0',
         'rich==12.5.1',
         'recorder-utils',
     ],