Feature/pre commit pyupgrade #413

Merged
6 commits merged on Nov 10, 2020
2 changes: 1 addition & 1 deletion .circleci/config.yml
@@ -24,7 +24,7 @@ jobs:
- run:
name: Run pre-checks
command: |
pre-commit run --all-files
pre-commit run --all-files --show-diff-on-failure

linux-python-38: &linux-template
docker:
7 changes: 7 additions & 0 deletions .pre-commit-config.yaml
@@ -7,6 +7,13 @@ repos:
- id: trailing-whitespace
exclude: 'setup.cfg'
- id: debug-statements
- repo: https://github.com/asottile/pyupgrade
rev: v2.7.3
hooks:
- id: pyupgrade
exclude: '(?:configobj/.*)'
args:
- --py36-plus
- repo: local
hooks:
- id: flake8
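For context, the hook added above rewrites syntax that is only needed on Python 2 or pre-3.6 releases. The snippet below is a minimal, hypothetical sketch of the two rewrite patterns that dominate this diff (str.format() to f-strings, and the IOError alias collapsing into OSError); it is illustrative only and not taken from the repository.

```python
# Hypothetical before/after sketch of typical `pyupgrade --py36-plus` rewrites.
name = "signac"

# Before: msg = "Hello, {}!".format(name)
msg = f"Hello, {name}!"  # after: str.format() becomes an f-string

# Before: except (OSError, IOError):
# After:  except OSError:   (IOError is only an alias of OSError on Python 3)
try:
    with open("does-not-exist.txt") as fh:
        fh.read()
except OSError:
    msg = "file not found"

print(msg)
```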
2 changes: 1 addition & 1 deletion .sync-zenodo-metadata.py
@@ -36,7 +36,7 @@ def from_citation_author(cls, **citation):

def as_zenodo_creator(self):
ret = dict(
name='{} {}'.format(self.first_names, self.last_names),
name=f'{self.first_names} {self.last_names}',
affiliation=self.affiliation)
if self.orcid:
ret['orcid'] = self.orcid.lstrip('https://orcid.org/')
16 changes: 8 additions & 8 deletions benchmark.py
@@ -62,7 +62,7 @@
def size(fn):
try:
return os.path.getsize(fn)
except (OSError, IOError):
except OSError:
return 0


@@ -125,7 +125,7 @@ def _make_doc(i, num_keys=1, data_size=0):
assert num_keys >= 1
assert data_size >= 0

doc = {'b_{}'.format(j): _random_str(data_size) for j in range(num_keys - 1)}
doc = {f'b_{j}': _random_str(data_size) for j in range(num_keys - 1)}
doc['a'] = '{}{}'.format(i, _random_str(max(0, data_size - len(str(i)))))
return doc

@@ -162,7 +162,7 @@ def setup_random_project(N, num_keys=1, num_doc_keys=0,
raise TypeError("N must be an integer!")

with TemporaryDirectory(dir=root) as tmp:
project = signac.init_project('benchmark-N={}'.format(N), root=tmp)
project = signac.init_project(f'benchmark-N={N}', root=tmp)
generate_random_data(project, N, num_keys, num_doc_keys, data_size, data_std)
yield project

@@ -182,15 +182,15 @@ def noop(*args, **kwargs):

def benchmark_project(project, keys=None):
root = project.root_directory()
setup = "import signac; project = signac.get_project(root='{}'); ".format(root)
setup = f"import signac; project = signac.get_project(root='{root}'); "
setup += "from itertools import islice, repeat; import random; "
setup += "from benchmark import noop; "

data = OrderedDict()

def run(key, timer, repeat=3, number=10):
if keys is None or key in keys:
logger.info("Run '{}'...".format(key))
logger.info(f"Run '{key}'...")
data[key] = timer.repeat(repeat=repeat, number=number)

run('determine_len', Timer('len(project)', setup=setup))
@@ -321,7 +321,7 @@ def tr(s):
'tool,N': "Tool, N",
}.get(cat, cat)
if cplx is not None:
t += ' O({})'.format(cplx)
t += f' O({cplx})'
return t


@@ -351,10 +351,10 @@ def main_compare(args):
repo = git.Repo(search_parent_directories=True)
rev_this = str(repo.commit(args.rev_this))
doc_this = read_benchmark(args.filename, {'meta.versions.git.sha1': rev_this})
assert len(doc_this), "Can't find results for '{}'.".format(args.rev_this)
assert len(doc_this), f"Can't find results for '{args.rev_this}'."
rev_other = repo.commit(args.rev_other)
doc_other = read_benchmark(args.filename, {'meta.versions.git.sha1': str(rev_other)})
assert len(doc_other), "Can't find results for '{}'.".format(args.rev_other)
assert len(doc_other), f"Can't find results for '{args.rev_other}'."

print("Showing runtime {} ({}) / {} ({}):".format(
args.rev_this, str(rev_this)[:6],
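A note on the setup strings in benchmark_project above: they are executed by timeit.Timer, so the f-string has to interpolate the project root into source code that is itself quoted. A minimal sketch of that pattern, using a hypothetical path and statement rather than the signac API:

```python
from timeit import Timer

root = "/tmp/example-project"  # hypothetical path, for illustration only
# The interpolated value ends up inside the quoted setup statement.
setup = f"import os; root = '{root}'"
timer = Timer("os.path.basename(root)", setup=setup)

print(min(timer.repeat(repeat=3, number=1000)))
```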
1 change: 1 addition & 0 deletions changelog.txt
@@ -14,6 +14,7 @@ Added
+++++

- Support for h5py version 3 (#411).
- Added pyupgrade to pre-commit hooks (#413).

[1.5.0] -- 2020-09-20
---------------------
2 changes: 1 addition & 1 deletion setup.cfg
@@ -12,7 +12,7 @@ python-tag = py3

[flake8]
max-line-length = 100
exclude = configobj,passlib,cite.py,conf.py
exclude = configobj,conf.py
# Use select to ignore unwanted flake8 plugins
select = E,F,W
# Specify errors to ignore by default
2 changes: 1 addition & 1 deletion setup.py
@@ -23,7 +23,7 @@
fn_readme = os.path.join(this_path, 'README.md')
with open(fn_readme) as fh:
long_description = fh.read()
except (IOError, OSError):
except OSError:
long_description = description

setup(
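The changes from except (IOError, OSError) to except OSError in this diff are behavior-preserving because IOError has been a plain alias of OSError since Python 3.3. A quick illustrative check:

```python
# On Python 3, IOError is the same class object as OSError, so catching
# OSError alone covers everything the old (IOError, OSError) tuple did.
assert IOError is OSError

try:
    open("/path/that/does/not/exist")
except OSError as error:
    # FileNotFoundError (what used to surface as IOError) is an OSError subclass.
    print(type(error).__name__)
```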
64 changes: 32 additions & 32 deletions signac/__main__.py
@@ -132,9 +132,9 @@ def _fmt_bytes(nbytes, suffix='B'):
"""
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(nbytes) < 1024.0:
return "%3.1f %s%s" % (nbytes, unit, suffix)
return f"{nbytes:3.1f} {unit}{suffix}"
nbytes /= 1024.0
return "%.1f %s%s" % (nbytes, 'Yi', suffix)
return "{:.1f} {}{}".format(nbytes, 'Yi', suffix)


def _passlib_available():
@@ -188,7 +188,7 @@ def hashpw(pw):

def _read_index(project, fn_index=None):
if fn_index is not None:
_print_err("Reading index from file '{}'...".format(fn_index))
_print_err(f"Reading index from file '{fn_index}'...")
file_descriptor = open(fn_index)
return (json.loads(line) for line in file_descriptor)

@@ -200,7 +200,7 @@ def _open_job_by_id(project, job_id):
except KeyError:
close_matches = difflib.get_close_matches(
job_id, [jid[:len(job_id)] for jid in project.find_job_ids()])
msg = "Did not find job corresponding to id '{}'.".format(job_id)
msg = f"Did not find job corresponding to id '{job_id}'."
if len(close_matches) == 1:
msg += " Did you mean '{}'?".format(close_matches[0])
elif len(close_matches) > 1:
@@ -245,7 +245,7 @@ def main_project(args):
project = get_project()
if args.access:
fn = project.create_access_module()
_print_err("Created access module '{}'.".format(fn))
_print_err(f"Created access module '{fn}'.")
return
if args.index:
for doc in project.index():
@@ -267,7 +267,7 @@ def main_job(args):
try:
statepoint = json.loads(sp)
except ValueError:
_print_err("Error while reading statepoint: '{}'".format(sp))
_print_err(f"Error while reading statepoint: '{sp}'")
raise
job = project.open_job(statepoint)
if args.create:
@@ -331,9 +331,9 @@ def main_move(args):
job.move(dst_project)
except DestinationExistsError:
_print_err(
"Destination already exists: '{}' in '{}'.".format(job, dst_project))
f"Destination already exists: '{job}' in '{dst_project}'.")
else:
_print_err("Moved '{}' to '{}'.".format(job, dst_project))
_print_err(f"Moved '{job}' to '{dst_project}'.")


def main_clone(args):
@@ -345,9 +345,9 @@ def main_clone(args):
job = _open_job_by_id(project, job_id)
dst_project.clone(job)
except DestinationExistsError:
_print_err("Destination already exists: '{}' in '{}'.".format(job, dst_project))
_print_err(f"Destination already exists: '{job}' in '{dst_project}'.")
else:
_print_err("Cloned '{}' to '{}'.".format(job, dst_project))
_print_err(f"Cloned '{job}' to '{dst_project}'.")


def main_index(args):
@@ -399,7 +399,7 @@ def format_lines(cat, _id, s):
if len(args.doc) != 0:
doc = {key: doc[key] for key in args.doc if key in doc}
print(format_lines('sp ', job_id, doc))
except IOError as error:
except OSError as error:
if error.errno == errno.EPIPE:
sys.stderr.close()
else:
@@ -436,7 +436,7 @@ def main_init(args):
name=args.project_id,
root=os.getcwd(),
workspace=args.workspace)
_print_err("Initialized project '{}'.".format(project))
_print_err(f"Initialized project '{project}'.")


def main_schema(args):
@@ -530,13 +530,13 @@ def _sig(st):
re.compile(args.key)
except re.error as e:
raise RuntimeError(
"Illegal regular expression '{}': '{}'.".format(args.key, e))
f"Illegal regular expression '{args.key}': '{e}'.")
doc_sync = DocSync.ByKey(lambda key: re.match(args.key, key))
else:
doc_sync = DocSync.ByKey()

try:
_print_err("Synchronizing '{}' -> '{}'...".format(source, destination))
_print_err(f"Synchronizing '{source}' -> '{destination}'...")
stats = destination.sync(
other=source,
strategy=strategy,
@@ -645,11 +645,11 @@ def _main_import_non_interactive(project, origin, args):
for src, copy_executor in tqdm(dict(data_mapping).items(), 'Importing'):
paths[src] = copy_executor(copytree=shutil.move if args.move else None)
except DestinationExistsError as error:
_print_err("Destination '{}' already exists.".format(error.destination))
_print_err(f"Destination '{error.destination}' already exists.")
if not args.sync:
_print_err("Consider using '--sync' or '--sync-interactive'!")
except SyncConflict as error:
_print_err("Synchronization failed with error: {}".format(error))
_print_err(f"Synchronization failed with error: {error}")
_print_err("Consider using '--sync-interactive'!")
else:
return paths
@@ -700,7 +700,7 @@ def main_export(args):
pbar.update(1)
except _SchemaPathEvaluationError as error:
raise RuntimeWarning(
"An error occurred while evaluating the schema path: {}".format(error))
f"An error occurred while evaluating the schema path: {error}")

if paths:
_print_err("Exported {} job(s).".format(len(paths)))
@@ -716,7 +716,7 @@ def main_update_cache(args):
if n is None:
_print_err("Cache is up to date.")
else:
_print_err("Updated cache (size={}).".format(n))
_print_err(f"Updated cache (size={n}).")


# UNCOMMENT THE FOLLOWING BLOCK WHEN THE FIRST MIGRATION IS INTRODUCED.
@@ -791,7 +791,7 @@ def main_config_show(args):
mode = ' global '
else:
mode = ''
_print_err("Did not find a{}configuration file.".format(mode))
_print_err(f"Did not find a{mode}configuration file.")
return
for key in args.key:
for kt in key.split('.'):
@@ -832,9 +832,9 @@ def main_config_verify(args):
else:
mode = ''
raise RuntimeWarning(
"Did not find a{}configuration file.".format(mode))
f"Did not find a{mode}configuration file.")
if cfg.filename is not None:
_print_err("Verifcation of config file '{}'.".format(cfg.filename))
_print_err(f"Verifcation of config file '{cfg.filename}'.")
verify_config(cfg)


@@ -875,7 +875,7 @@ def main_config_set(args):
sec = sec.setdefault(key, dict())
try:
sec[keys[-1]] = args.value
_print_err("Updated value '{}'='{}'.".format(args.key, args.value))
_print_err(f"Updated value '{args.key}'='{args.value}'.")
except TypeError:
raise KeyError(args.key)
_print_err("Writing configuration to '{}'.".format(
@@ -922,7 +922,7 @@ def hostcfg():

if args.test:
if hostcfg():
_print_err("Trying to connect to host '{}'...".format(args.hostname))
_print_err(f"Trying to connect to host '{args.hostname}'...")
try:
client = get_client(hostcfg())
client.address
@@ -931,9 +931,9 @@ def hostcfg():
"connect to host '{}'.".format(args.hostname))
raise
else:
print("Successfully connected to host '{}'.".format(args.hostname))
print(f"Successfully connected to host '{args.hostname}'.")
else:
_print_err("Host '{}' is not configured.".format(args.hostname))
_print_err(f"Host '{args.hostname}' is not configured.")
return

if args.remove:
@@ -959,9 +959,9 @@ def hostcfg():
return

if hostcfg():
_print_err("Configuring host '{}'.".format(args.hostname))
_print_err(f"Configuring host '{args.hostname}'.")
else:
_print_err("Configuring new host '{}'.".format(args.hostname))
_print_err(f"Configuring new host '{args.hostname}'.")

def hide_password(k, v):
"""Hide all fields containing sensitive information."""
@@ -973,7 +973,7 @@ def update_hostcfg(** update):
for k, v in update.items():
if v is None:
if k in hostcfg():
logging.info("Deleting key {}".format(k))
logging.info(f"Deleting key {k}")
del cfg['hosts'][args.hostname][k]
store = True
elif k not in hostcfg() or v != hostcfg()[k]:
@@ -1025,7 +1025,7 @@ def requires_username():
new_pw = args.password
update_hostcfg(password=new_pw, password_config=None)

_print_err("Configured host '{}':".format(args.hostname))
_print_err(f"Configured host '{args.hostname}':")
print("[hosts]")
for line in config.Config({args.hostname: hostcfg()}).write():
print(_hide_password(line))
@@ -1105,7 +1105,7 @@ def write_history_file():
python_version=sys.version,
signac_version=__version__,
project_id=project.id,
job_banner='\nJob:\t\t{job._id}'.format(job=job) if job is not None else '',
job_banner=f'\nJob:\t\t{job._id}' if job is not None else '',
root_path=project.root_directory(),
workspace_path=project.workspace(),
size=len(project)))
@@ -1873,12 +1873,12 @@ def main():
raise
sys.exit(1)
except RuntimeWarning as warning:
_print_err("Warning: {}".format(warning))
_print_err(f"Warning: {warning}")
if args.debug:
raise
sys.exit(1)
except Exception as error:
_print_err('Error: {}'.format(error))
_print_err(f'Error: {error}')
if args.debug:
raise
sys.exit(1)
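One detail from _fmt_bytes above: printf-style width/precision specifiers such as %3.1f map directly onto f-string format specs, so the converted line prints the same text. A small sketch with arbitrary example values:

```python
# printf-style formatting and the equivalent f-string yield identical output;
# the values below are arbitrary examples, not signac data.
nbytes, unit, suffix = 3.14159, "Ki", "B"

old_style = "%3.1f %s%s" % (nbytes, unit, suffix)
new_style = f"{nbytes:3.1f} {unit}{suffix}"

assert old_style == new_style == "3.1 KiB"
print(new_style)
```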
4 changes: 3 additions & 1 deletion signac/cite.py
@@ -16,7 +16,9 @@
"""


ARXIV_REFERENCE = "Carl S. Adorf, Paul M. Dodd, and Sharon C. Glotzer. signac - A Simple Data Management Framework. 2016. arXiv:1611.03543 [cs.DB]"
ARXIV_REFERENCE = ("Carl S. Adorf, Paul M. Dodd, and Sharon C. Glotzer. "
"signac - A Simple Data Management Framework. 2016. "
"arXiv:1611.03543 [cs.DB]")


def bibtex(file=None):
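The ARXIV_REFERENCE change relies on Python concatenating adjacent string literals inside parentheses at compile time, so splitting the citation across lines does not change its value. An illustrative check with a shortened example string:

```python
# Adjacent string literals inside parentheses are joined at compile time,
# so the wrapped form equals the single long literal. Shortened example text.
single = "Carl S. Adorf, Paul M. Dodd, and Sharon C. Glotzer. signac - A Simple Data Management Framework."
wrapped = ("Carl S. Adorf, Paul M. Dodd, and Sharon C. Glotzer. "
           "signac - A Simple Data Management Framework.")

assert single == wrapped
```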