Ruff autofixes
taldcroft committed Nov 15, 2023
1 parent ae3dc98 · commit f6012e9
Showing 16 changed files with 46 additions and 42 deletions.
cheta/add_derived.py: 6 changes (2 additions, 4 deletions)
@@ -3,6 +3,7 @@
 
 import optparse
 import os
+import pickle
 import re
 from pathlib import Path
 
@@ -12,11 +13,8 @@
 import Ska.DBI
 import tables
 from Chandra.Time import DateTime
-from six.moves import cPickle as pickle
 
-import cheta.derived as derived
-import cheta.fetch as fetch
-import cheta.file_defs as file_defs
+from cheta import derived, fetch, file_defs
 
 
 def get_options():
cheta/check_integrity.py: 4 changes (2 additions, 2 deletions)
@@ -3,15 +3,15 @@
 
 import optparse
 import os
+import pickle
 import re
 
 import pyyaks.context
 import pyyaks.logger
 import Ska.DBI
 import tables
-from six.moves import cPickle as pickle
 
-import cheta.fetch as fetch
+from cheta import fetch
 
 opt = None
 ft = fetch.ft
cheta/converters.py: 2 changes (1 addition, 1 deletion)
@@ -523,7 +523,7 @@ def acisdeahk(dat):
     dat = pyfits_to_recarray(dat)
     rows = dat[dat["CCD_ID"] >= 10]
     if len(rows) == 0:
-        raise NoValidDataError()
+        raise NoValidDataError
 
     # Go through input data one row (query) at a time and assemble contemporaneous
     # queries into a single row with a column for each query value.
cheta/derived/comps.py: 2 changes (1 addition, 1 deletion)
@@ -340,7 +340,7 @@ def get_stats_attrs(self, tstart, tstop, msid, match_args, interval):
             rows = np.searchsorted(msid_obj.times, times)
             vals_stats = calc_stats_vals(msid_obj, rows, indexes, interval)
         else:
-            raise ValueError()
+            raise ValueError
 
         # Replicate the name munging that fetch does going from the HDF5 columns
         # to what is seen in a stats fetch query.
cheta/derived/mups_valve.py: 1 change (0 additions, 1 deletion)
@@ -1,4 +1,3 @@
-# coding: utf-8
 
 """
 Fetch clean MUPS valve temperature telemetry
cheta/fetch.py: 8 changes (6 additions, 2 deletions)
@@ -22,8 +22,12 @@
 from Chandra.Time import DateTime
 from ska_helpers.utils import lru_cache_timed
 
-from . import __version__  # noqa
-from . import cache, file_defs, remote_access
+from . import (
+    __version__,  # noqa
+    cache,
+    file_defs,
+    remote_access,
+)
 from .derived.comps import ComputedMsid
 from .lazy import LazyDict
 from .remote_access import ENG_ARCHIVE
cheta/fetch_eng.py: 6 changes (4 additions, 2 deletions)
@@ -1,8 +1,10 @@
 # Licensed under a 3-clause BSD style license - see LICENSE.rst
 import sys
 
-from . import __version__  # noqa
-from . import fetch
+from . import (
+    __version__,  # noqa
+    fetch,
+)
 from .fetch import *  # noqa
 
 # Module-level units, defaults to CXC units (e.g. Kelvins etc)
cheta/fetch_sci.py: 6 changes (4 additions, 2 deletions)
@@ -1,8 +1,10 @@
 # Licensed under a 3-clause BSD style license - see LICENSE.rst
 import sys
 
-from . import __version__  # noqa
-from . import fetch
+from . import (
+    __version__,  # noqa
+    fetch,
+)
 from .fetch import *  # noqa
 
 # Module-level units, defaults to CXC units (e.g. Kelvins etc)
cheta/fix_bad_values.py: 3 changes (1 addition, 2 deletions)
@@ -50,8 +50,7 @@
 import tables
 from Chandra.Time import DateTime
 
-import cheta.file_defs as file_defs
-from cheta import fetch
+from cheta import fetch, file_defs
 
 ft = fetch.ft
 opt = None
cheta/units.py: 2 changes (1 addition, 1 deletion)
@@ -39,10 +39,10 @@
 """
 import logging
 import os
+import pickle
 import warnings
 
 import numpy as np
-import pickle
 
 
 class NullHandler(logging.Handler):
cheta/update_archive.py: 8 changes (3 additions, 5 deletions)
@@ -25,10 +25,8 @@
 from Chandra.Time import DateTime
 from ska_helpers.retry import tables_open_file
 
-import cheta.converters as converters
 import cheta.derived
-import cheta.fetch as fetch
-import cheta.file_defs as file_defs
+from cheta import converters, fetch, file_defs
 
 
 def get_options(args=None):
@@ -299,7 +297,7 @@ def main_loop():
         try:
             Ska.tdb.msids[colname].Tsc["STATE_CODE"]
         except Exception:
-            if not colname.upper() in fetch.STATE_CODES:
+            if colname.upper() not in fetch.STATE_CODES:
                 continue
 
         msid = update_stats(colname, "daily")
@@ -864,7 +862,7 @@ def truncate_archive(filetype, date):
         "SELECT rowstart FROM archfiles WHERE year>={0} AND doy>={1}".format(year, doy)
     )
     if len(out) == 0:
-        logger.verbose(f"No rows to delete - skipping")
+        logger.verbose("No rows to delete - skipping")
         db.conn.close()
         return
 
cheta/update_client_archive.py: 7 changes (3 additions, 4 deletions)
@@ -102,7 +102,6 @@ def get_options(args=None):
 class RowMismatchError(ValueError):
     """Exception for row mismatch between existing archive and available sync update"""
 
-    pass
 
 
 @contextlib.contextmanager
@@ -344,7 +343,7 @@ def get_copy_files(logger, msids, msids_content):
         f"Found {len(copy_files)} local archive files that are "
         "missing and need to be copied"
     )
-    logger.debug(f"Copy_files:")
+    logger.debug("Copy_files:")
     for copy_file in sorted(copy_files):
         logger.debug(copy_file)
 
@@ -586,7 +585,7 @@ def sync_stat_archive(opt, msid_files, logger, content, stat, index_tbl):
         )
         logger.warn(msg)
         logger.warn(
-            f"Attempting to fix by removing that file and trying to sync again."
+            "Attempting to fix by removing that file and trying to sync again."
         )
         pth.unlink()
         _sync_stat_archive(opt, msid_files, logger, content, stat, index_tbl)
@@ -813,7 +812,7 @@ def get_last_date_id(msid_files, msids, stat, logger):
         with open(last_date_id_file, "r") as fh:
             last_date_id = fh.read()
     else:
-        logger.verbose(f"Reading stat h5 files to get last update time")
+        logger.verbose("Reading stat h5 files to get last update time")
         times = []
         for msid in msids:
             fetch.ft["msid"] = msid
cheta/utils.py: 2 changes (1 addition, 1 deletion)
@@ -457,7 +457,7 @@ def get_telem_table(
     :returns: Table of requested telemetry values from fetch
     """
-    from cheta import fetch_eng, fetch_sci, fetch
+    from cheta import fetch, fetch_eng, fetch_sci
 
     start = CxoTime(start)
     stop = CxoTime(stop)
make_units_cxc.py: 13 changes (6 additions, 7 deletions)
@@ -2,19 +2,18 @@
 """
 Create the default unit system as found in the CXC telemetry FITS files.
 """
-from __future__ import print_function
 
+import argparse
+import glob
 import os
+import pickle
 import re
-import glob
-import argparse
-import cPickle as pickle
+
 import pyfits
 import pyyaks
 
-from cheta.converters import _get_deahk_cols, CXC_TO_MSID
-from cheta import file_defs
-from cheta import fetch
+from cheta import fetch, file_defs
+from cheta.converters import CXC_TO_MSID, _get_deahk_cols
 
 
 def get_options(args=None):
make_units_eng.py: 3 changes (2 additions, 1 deletion)
@@ -6,9 +6,10 @@
 import pickle
 from copy import copy
 
-import cheta.converters
 import Ska.tdb
 
+import cheta.converters
+
 units_cxc = pickle.load(open("units_cxc.pkl", "rb"))
 units_eng = copy(units_cxc)
 
ruff.toml: 15 changes (9 additions, 6 deletions)
@@ -50,12 +50,15 @@ ignore = [
 # TODO : fix these and stop ignoring. Commented out ones are common and OK to except.
 extend-ignore = [
     "PGH004", # Use specific rule codes when using `noqa`
-    # "C401", # Unnecessary generator (rewrite as a `set` comprehension)
-    # "C402", # Unnecessary generator (rewrite as a dict comprehension)
-    # "C405", # Unnecessary `list` literal (rewrite as a `set` literal)
-    # "C408", # Unnecessary `dict` call (rewrite as a literal)
-    # "C416", # Unnecessary `dict` comprehension (rewrite using `dict()`)
-    # "PGH002", # warn is deprecated in favor of warning
+    "D205", # 1 blank line required between summary line and description
+    "ARG001", # Unused function argument
+    "PLW2901", # `for` loop variable overwritten by assignment target
+    "C401", # Unnecessary generator (rewrite as a `set` comprehension)
+    "C402", # Unnecessary generator (rewrite as a dict comprehension)
+    "C405", # Unnecessary `list` literal (rewrite as a `set` literal)
+    "C408", # Unnecessary `dict` call (rewrite as a literal)
+    "C416", # Unnecessary `dict` comprehension (rewrite using `dict()`)
+    "PGH002", # warn is deprecated in favor of warning
     # "PYI056", # Calling `.append()` on `__all__` may not be supported by all type checkers
 ]
 
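For context, a minimal standalone illustration (not taken from this repository) of the kind of code that two of the flake8-comprehensions rules listed above flag, going by the rule descriptions in the comments:

# Hypothetical example, not from cheta: patterns flagged by rules C401 and C405.
words = ["dog", "cat", "dog"]

lengths = set(len(w) for w in words)  # C401: unnecessary generator passed to set()
lengths = {len(w) for w in words}  # rewritten as a set comprehension

animals = set(["dog", "cat"])  # C405: unnecessary list literal passed to set()
animals = {"dog", "cat"}  # rewritten as a set literal

print(lengths, animals)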
