diff --git a/.github/workflows/check_release_tag.py b/.github/workflows/check_release_tag.py index 95f80cbe7c..47b45865c5 100644 --- a/.github/workflows/check_release_tag.py +++ b/.github/workflows/check_release_tag.py @@ -1,16 +1,31 @@ # -*- coding: utf-8 -*- """Check that the GitHub release tag matches the package version.""" import argparse -import json +import ast +from pathlib import Path + + +def get_version_from_module(content: str) -> str: + """Get the __version__ value from a module.""" + # adapted from setuptools/config.py + try: + module = ast.parse(content) + except SyntaxError as exc: + raise IOError(f'Unable to parse module: {exc}') + try: + return next( + ast.literal_eval(statement.value) for statement in module.body if isinstance(statement, ast.Assign) + for target in statement.targets if isinstance(target, ast.Name) and target.id == '__version__' + ) + except StopIteration: + raise IOError('Unable to find __version__ in module') + if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('GITHUB_REF', help='The GITHUB_REF environmental variable') - parser.add_argument('SETUP_PATH', help='Path to the setup.json') args = parser.parse_args() assert args.GITHUB_REF.startswith('refs/tags/v'), f'GITHUB_REF should start with "refs/tags/v": {args.GITHUB_REF}' tag_version = args.GITHUB_REF[11:] - with open(args.SETUP_PATH, encoding='utf8') as handle: - data = json.load(handle) - pypi_version = data['version'] - assert tag_version == pypi_version, f'The tag version {tag_version} != {pypi_version} specified in `setup.json`' + pypi_version = get_version_from_module(Path('aiida/__init__.py').read_text(encoding='utf-8')) + assert tag_version == pypi_version, f'The tag version {tag_version} != {pypi_version} specified in `pyproject.toml`' diff --git a/.github/workflows/ci-code.yml b/.github/workflows/ci-code.yml index be5470b5a6..3a5ab9df30 100644 --- a/.github/workflows/ci-code.yml +++ b/.github/workflows/ci-code.yml @@ -33,7 +33,7 @@ 
jobs: if: failure() && steps.check_reqs.outputs.error uses: peter-evans/commit-comment@v1 with: - path: setup.json + path: pyproject.toml body: | ${{ steps.check_reqs.outputs.error }} diff --git a/.github/workflows/ci-style.yml b/.github/workflows/ci-style.yml index 29d7bbc0de..99f3120e4d 100644 --- a/.github/workflows/ci-style.yml +++ b/.github/workflows/ci-style.yml @@ -29,6 +29,7 @@ jobs: - name: Install python dependencies run: | + pip install --upgrade pip pip install -r requirements/requirements-py-3.8.txt pip install -e .[pre-commit] pip freeze diff --git a/.github/workflows/docs-build.yml b/.github/workflows/docs-build.yml index 73f1a74a35..89eefe39fa 100644 --- a/.github/workflows/docs-build.yml +++ b/.github/workflows/docs-build.yml @@ -22,7 +22,8 @@ jobs: python-version: '3.8' - name: Install python dependencies run: | - pip install -e .[docs,tests] + pip install --upgrade pip + pip install -e .[docs,tests,rest,atomic_tools] - name: Build HTML docs id: linkcheck run: | diff --git a/.github/workflows/post-release.yml b/.github/workflows/post-release.yml index 4ad96e7671..f911732cd2 100644 --- a/.github/workflows/post-release.yml +++ b/.github/workflows/post-release.yml @@ -30,8 +30,9 @@ jobs: - name: Install python dependencies run: | + pip install --upgrade pip pip install transifex-client sphinx-intl - pip install -e .[docs,tests] + pip install -e .[docs,tests,rest,atomic_tools] - name: Build pot files env: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 05555ef5f0..571d257d65 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -23,7 +23,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: '3.8' - - run: python .github/workflows/check_release_tag.py $GITHUB_REF setup.json + - run: python .github/workflows/check_release_tag.py $GITHUB_REF pre-commit: @@ -43,7 +43,11 @@ jobs: sudo apt update sudo apt install libkrb5-dev ruby ruby-dev - name: Install python dependencies - run: pip 
install -e .[all] + run: | + pip install --upgrade pip + pip install -r requirements/requirements-py-3.8.txt + pip install -e .[pre-commit] + pip freeze - name: Run pre-commit run: pre-commit run --all-files || ( git status --short ; git diff ; exit 1 ) @@ -126,12 +130,12 @@ jobs: uses: actions/setup-python@v2 with: python-version: '3.8' - - name: Build package + - name: install flit run: | - pip install wheel - python setup.py sdist bdist_wheel - - name: Publish to PyPI - uses: pypa/gh-action-pypi-publish@v1.1.0 - with: - user: __token__ - password: ${{ secrets.PYPI_KEY }} + pip install flit~=3.4 + - name: Build and publish + run: | + flit publish + env: + FLIT_USERNAME: __token__ + FLIT_PASSWORD: ${{ secrets.PYPI_KEY }} diff --git a/.github/workflows/test-install.yml b/.github/workflows/test-install.yml index a4744a3703..970523fcb4 100644 --- a/.github/workflows/test-install.yml +++ b/.github/workflows/test-install.yml @@ -3,7 +3,6 @@ name: test-install on: pull_request: paths: - - 'setup.*' - 'environment.yml' - '**/requirements*.txt' - 'pyproject.toml' @@ -251,14 +250,14 @@ jobs: # Add python-version specific requirements/ file to the requirements.txt artifact. # This artifact can be used in the next step to automatically create a pull request - # updating the requirements (in case they are inconsistent with the setup.json file). + # updating the requirements (in case they are inconsistent with the pyproject.toml file). - uses: actions/upload-artifact@v1 if: matrix.backend == 'django' # The requirements are identical between backends. with: name: requirements.txt path: requirements-py-${{ matrix.python-version }}.txt -# Check whether the requirements/ files are consistent with the dependency specification in the setup.json file. +# Check whether the requirements/ files are consistent with the dependency specification in the pyproject.toml file. 
# If the check fails, warn the user via a comment and try to automatically create a pull request to update the files # (does not work on pull requests from forks). @@ -293,7 +292,7 @@ jobs: uses: peter-evans/commit-comment@v1 with: token: ${{ secrets.GITHUB_TOKEN }} - path: setup.json + path: pyproject.toml body: | The requirements/ files are inconsistent! @@ -328,7 +327,7 @@ jobs: title: "Update requirements/ files." body: | Update requirements files to ensure that they are consistent - with the dependencies specified in the 'setup.json' file. + with the dependencies specified in the 'pyproject.toml' file. Please note, that this pull request was likely created to resolve the inconsistency for a specific dependency, however @@ -344,7 +343,7 @@ jobs: issue-number: ${{ github.event.number }} body: | I automatically created a pull request (#${{ steps.create_update_requirements_pr.outputs.pr_number }}) that adapts the - requirements/ files according to the dependencies specified in the 'setup.json' file. + requirements/ files according to the dependencies specified in the 'pyproject.toml' file. - name: Create PR comment on failure if: steps.create_update_requirements_pr.outcome == 'Failure' @@ -353,4 +352,4 @@ jobs: issue-number: ${{ github.event.number }} body: | Please update the requirements/ files to ensure that they - are consistent with the dependencies specified in the 'setup.json' file. + are consistent with the dependencies specified in the 'pyproject.toml' file. 
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 46306c4942..2a428aca3c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ ci: autoupdate_schedule: monthly autofix_prs: true - skip: [mypy, pylint, dm-generate-all, dependencies, verdi-autodocs, version-number] + skip: [mypy, pylint, dm-generate-all, dependencies, verdi-autodocs] repos: - repo: https://github.com/pre-commit/pre-commit-hooks @@ -123,8 +123,7 @@ repos: pass_filenames: false files: >- (?x)^( - setup.py| - setup.json| + pyproject.toml| utils/dependency_management.py )$ @@ -135,8 +134,7 @@ repos: pass_filenames: false files: >- (?x)^( - setup.json| - setup.py| + pyproject.toml| utils/dependency_management.py| environment.yml| )$ @@ -153,15 +151,3 @@ repos: aiida/cmdline/params/types/.*| utils/validate_consistency.py| )$ - - - id: version-number - name: Check version numbers - entry: python ./utils/validate_consistency.py version - language: system - pass_filenames: false - files: >- - (?x)^( - setup.json| - utils/validate_consistency.py| - aiida/__init__.py - )$ diff --git a/.readthedocs.yml b/.readthedocs.yml index ffc1ec2a59..c31727ba10 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -15,6 +15,8 @@ python: extra_requirements: - docs - tests + - rest + - atomic_tools # Let the build fail if there are any warnings sphinx: diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index d64dfc817f..0000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,7 +0,0 @@ -include aiida/cmdline/templates/*.tpl -include aiida/manage/configuration/schema/*.json -include setup.json -include AUTHORS.txt -include CHANGELOG.md -include pyproject.toml -include LICENSE.txt diff --git a/aiida/calculations/diff_tutorial/calculations.py b/aiida/calculations/diff_tutorial/calculations.py index 59bb488925..5e3887a90b 100644 --- a/aiida/calculations/diff_tutorial/calculations.py +++ b/aiida/calculations/diff_tutorial/calculations.py @@ -2,7 +2,7 @@ """ Calculations 
provided by aiida_diff tutorial plugin. -Register calculations via the "aiida.calculations" entry point in the setup.json file. +Register calculations via the "aiida.calculations" entry point in the pyproject.toml file. """ from aiida.common import datastructures from aiida.engine import CalcJob diff --git a/aiida/parsers/plugins/diff_tutorial/parsers.py b/aiida/parsers/plugins/diff_tutorial/parsers.py index db2c668b2d..d5120f6c80 100644 --- a/aiida/parsers/plugins/diff_tutorial/parsers.py +++ b/aiida/parsers/plugins/diff_tutorial/parsers.py @@ -2,7 +2,7 @@ """ Parsers for DiffCalculation of plugin tutorial. -Register parsers via the "aiida.parsers" entry point in the setup.json file. +Register parsers via the "aiida.parsers" entry point in the pyproject.toml file. """ # START PARSER HEAD from aiida.engine import ExitCode diff --git a/docs/source/howto/plugins_develop.rst b/docs/source/howto/plugins_develop.rst index 070c5070e0..64bae28307 100644 --- a/docs/source/howto/plugins_develop.rst +++ b/docs/source/howto/plugins_develop.rst @@ -84,8 +84,7 @@ Here is an example of a folder structure for an AiiDA plugin, illustrating diffe LICENSE - license of your plugin MANIFEST.in - lists non-python files to be installed, such as LICENSE README.md - project description for github and PyPI - setup.json - plugin metadata: installation requirements, author, entry points, etc. - setup.py - PyPI installation script, parses setup.json and README.md + pyproject.toml - plugin metadata: installation requirements, author, entry points, etc. ... A minimal plugin package instead might look like:: @@ -93,8 +92,7 @@ A minimal plugin package instead might look like:: aiida-minimal/ aiida_minimal/ __init__.py - setup.py - setup.json + pyproject.toml .. _how-to:plugins-develop:entrypoints: @@ -111,14 +109,11 @@ Adding a new entry point consists of the following steps: #. Finding the right entry point group. You can list the entry point groups defined by AiiDA via ``verdi plugin list``. 
For a documentation of the groups, see :ref:`topics:plugins:entrypointgroups`. - #. Adding the entry point to the ``entry_points`` field in the ``setup.json`` file:: + #. Adding the entry point to the ``entry_points`` field in the ``pyproject.toml`` file:: ... - entry_points={ - "aiida.calculations": [ - "mycode. = aiida_mycode.calcs.some:MysomethingCalculation" - ] - } + [project.entry-points."aiida.calculations"] + "mycode." = "aiida_mycode.calcs.some:MysomethingCalculation" ... Your new entry point should now show up in ``verdi plugin list aiida.calculations``. @@ -227,7 +222,7 @@ Since the source code of most AiiDA plugins is hosted on GitHub, the first conta * Make sure to have a useful ``README.md``, describing what your plugin does and how to install it. * Leaving a contact email and adding a license is also a good idea. - * Make sure the information in the ``setup.json`` file is correct and up to date (in particular the version number), since this information is used to advertise your package on the AiiDA plugin registry. + * Make sure the information in the ``pyproject.toml`` file is correct and up to date (in particular the version number), since this information is used to advertise your package on the AiiDA plugin registry. Source-code-level documentation ------------------------------- @@ -279,8 +274,7 @@ AiiDA plugin packages are published on the `AiiDA plugin registry `_ Before publishing your plugin, make sure your plugin comes with: - * a ``setup.json`` file with the plugin metadata - * a ``setup.py`` file for installing your plugin via ``pip`` + * a ``pyproject.toml`` file with the plugin metadata and for installing your plugin via ``pip`` * a license For examples of these files, see the `aiida-diff demo plugin `_. @@ -297,7 +291,7 @@ In order to register your plugin package, simply go to the `plugin registry `_, which include making the plugin available on the `python package index `_. 
This makes it possible for users to simply ``pip install aiida-myplugin``. -.. note:: - When updating the version of your plugin, don't forget to update the version number both in the ``setup.json`` and in ``aiida_mycode/__init__.py``. - - .. _plugin-cutter: https://github.com/aiidateam/aiida-plugin-cutter .. _aiida-diff: https://github.com/aiidateam/aiida-diff .. _pytest: https://pytest.org diff --git a/pyproject.toml b/pyproject.toml index b0734aa401..d5a6f7382e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,260 @@ [build-system] -requires = ["setuptools>=40.8.0", "wheel", "fastentrypoints~=0.12"] -build-backend = "setuptools.build_meta" +requires = ["flit_core >=3.4,<4"] +build-backend = "flit_core.buildapi" + +[project] +name = "aiida-core" +dynamic = ["version"] # read from aiida/__init__.py +description = "AiiDA is a workflow manager for computational science with a strong focus on provenance, performance and extensibility." +authors = [{name = "The AiiDA team", email = "developers@aiida.net"}] +readme = "README.md" +license = {file = "LICENSE.txt"} +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: POSIX :: Linux", + "Operating System :: MacOS :: MacOS X", + "Programming Language :: Python", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Topic :: Scientific/Engineering" +] +keywords = ["aiida", "workflows"] +requires-python = ">=3.8" +dependencies = [ + "aldjemy~=2.4", + "alembic~=1.2", + "archive-path~=0.3.6", + "aio-pika~=6.6", + "circus~=0.17.1", + "click-config-file~=0.6.0", + "click-spinner~=0.1.8", + "click~=8.0,>=8.0.3", + "disk-objectstore~=0.6.0", + "django~=2.2", + "graphviz~=0.13", + "ipython~=7.20", + "jinja2~=3.0", + "jsonschema~=3.0", + "kiwipy[rmq]~=0.7.4", + "importlib-metadata~=4.3", + "numpy~=1.19", + "pamqp~=2.3", + 
"paramiko~=2.7,>=2.7.2", + "plumpy~=0.20.0", + "pgsu~=0.2.0", + "psutil~=5.6", + "psycopg2-binary~=2.8.3", + "python-dateutil~=2.8", + "pytz~=2021.1", + "pyyaml~=5.4", + "simplejson~=3.16", + "sqlalchemy~=1.4.22", + "tabulate~=0.8.5", + "typing-extensions; python_version < '3.8'", + "tqdm~=4.45", + "tzlocal~=2.0", + "upf_to_json~=0.9.2", + "wrapt~=1.11.1" +] + +[project.urls] +Home = "http://www.aiida.net/" +Documentation = "https://aiida.readthedocs.io" +Source = "https://github.com/aiidateam/aiida-core" + +[project.optional-dependencies] +ssh_kerberos = [ + "gssapi~=1.6", + "pyasn1~=0.4.8" +] +rest = [ + "flask-cors~=3.0", + "flask-restful~=0.3.7", + "flask~=1.1", + "pyparsing~=2.4", + "python-memcached~=1.59", + "seekpath~=1.9,>=1.9.3" +] +docs = [ + "docutils==0.15.2", + "pygments~=2.5", + "pydata-sphinx-theme~=0.6.3", + "sphinx~=3.2.1", + "sphinxcontrib-details-directive~=0.1.0", + "sphinx-panels~=0.5.0", + "sphinx-copybutton~=0.3.0", + "sphinx-notfound-page~=0.5", + "sphinxext-rediraffe~=0.2.4", + "sphinx-sqlalchemy~=0.1.1" +] +atomic_tools = [ + "PyCifRW~=4.4", + "ase~=3.18", + "matplotlib~=3.3,>=3.3.4", + "pymatgen>=2019.7.2,<=2022.1.9,!=2019.9.7", + "pymysql~=0.9.3", + "seekpath~=1.9,>=1.9.3", + "spglib~=1.14" +] +notebook = [ + "jupyter-client~=6.1,<6.1.13", + "jupyter~=1.0", + "notebook~=6.1,>=6.1.5" +] +pre-commit = [ + "mypy==0.930", + "packaging==20.3", + "pre-commit~=2.2", + "pylint~=2.11.1", + "pylint-aiida~=0.1.1", + "pylint-django", + "sqlalchemy[mypy]~=1.4.29", + "tomli", +] +tests = [ + "aiida-export-migration-tests==0.9.0", + "pg8000~=1.13", + "pgtest~=1.3,>=1.3.1", + "pytest~=6.2", + "pytest-asyncio~=0.12,<0.17", + "pytest-timeout~=1.3", + "pytest-cov~=2.7,<2.11", + "pytest-rerunfailures~=9.1,>=9.1.1", + "pytest-benchmark~=3.2", + "pytest-regressions~=2.2", + "pympler~=0.9", + "coverage<5.0", + "sqlalchemy-diff~=0.1.3", + "sqlalchemy-utils~=0.37.2", + "sphinx~=3.2.1" +] +bpython = [ + "bpython~=0.18.0" +] + +[project.scripts] +verdi = 
"aiida.cmdline.commands.cmd_verdi:verdi" +runaiida = "aiida.cmdline.commands.cmd_run:run" + +[project.entry-points."aiida.calculations"] +"core.transfer" = "aiida.calculations.transfer:TransferCalculation" +"core.arithmetic.add" = "aiida.calculations.arithmetic.add:ArithmeticAddCalculation" +"core.templatereplacer" = "aiida.calculations.templatereplacer:TemplatereplacerCalculation" + +[project.entry-points."aiida.calculations.importers"] +"core.arithmetic.add" = "aiida.calculations.importers.arithmetic.add:ArithmeticAddCalculationImporter" + +[project.entry-points."aiida.cmdline.computer.configure"] +"core.local" = "aiida.transports.plugins.local:CONFIGURE_LOCAL_CMD" +"core.ssh" = "aiida.transports.plugins.ssh:CONFIGURE_SSH_CMD" + +[project.entry-points."aiida.cmdline.data"] +"core.array" = "aiida.cmdline.commands.cmd_data.cmd_array:array" +"core.bands" = "aiida.cmdline.commands.cmd_data.cmd_bands:bands" +"core.cif" = "aiida.cmdline.commands.cmd_data.cmd_cif:cif" +"core.dict" = "aiida.cmdline.commands.cmd_data.cmd_dict:dictionary" +"core.remote" = "aiida.cmdline.commands.cmd_data.cmd_remote:remote" +"core.singlefile" = "aiida.cmdline.commands.cmd_data.cmd_singlefile:singlefile" +"core.structure" = "aiida.cmdline.commands.cmd_data.cmd_structure:structure" +"core.trajectory" = "aiida.cmdline.commands.cmd_data.cmd_trajectory:trajectory" +"core.upf" = "aiida.cmdline.commands.cmd_data.cmd_upf:upf" + +[project.entry-points."aiida.cmdline.data.structure.import"] + +[project.entry-points."aiida.data"] +"core.array" = "aiida.orm.nodes.data.array.array:ArrayData" +"core.array.bands" = "aiida.orm.nodes.data.array.bands:BandsData" +"core.array.kpoints" = "aiida.orm.nodes.data.array.kpoints:KpointsData" +"core.array.projection" = "aiida.orm.nodes.data.array.projection:ProjectionData" +"core.array.trajectory" = "aiida.orm.nodes.data.array.trajectory:TrajectoryData" +"core.array.xy" = "aiida.orm.nodes.data.array.xy:XyData" +"core.base" = "aiida.orm.nodes.data:BaseType" 
+"core.bool" = "aiida.orm.nodes.data.bool:Bool" +"core.cif" = "aiida.orm.nodes.data.cif:CifData" +"core.code" = "aiida.orm.nodes.data.code:Code" +"core.dict" = "aiida.orm.nodes.data.dict:Dict" +"core.enum" = "aiida.orm.nodes.data.enum:EnumData" +"core.float" = "aiida.orm.nodes.data.float:Float" +"core.folder" = "aiida.orm.nodes.data.folder:FolderData" +"core.int" = "aiida.orm.nodes.data.int:Int" +"core.list" = "aiida.orm.nodes.data.list:List" +"core.jsonable" = "aiida.orm.nodes.data.jsonable:JsonableData" +"core.numeric" = "aiida.orm.nodes.data.numeric:NumericType" +"core.orbital" = "aiida.orm.nodes.data.orbital:OrbitalData" +"core.remote" = "aiida.orm.nodes.data.remote.base:RemoteData" +"core.remote.stash" = "aiida.orm.nodes.data.remote.stash.base:RemoteStashData" +"core.remote.stash.folder" = "aiida.orm.nodes.data.remote.stash.folder:RemoteStashFolderData" +"core.singlefile" = "aiida.orm.nodes.data.singlefile:SinglefileData" +"core.str" = "aiida.orm.nodes.data.str:Str" +"core.structure" = "aiida.orm.nodes.data.structure:StructureData" +"core.upf" = "aiida.orm.nodes.data.upf:UpfData" + +[project.entry-points."aiida.groups"] +"core" = "aiida.orm.groups:Group" +"core.auto" = "aiida.orm.groups:AutoGroup" +"core.import" = "aiida.orm.groups:ImportGroup" +"core.upf" = "aiida.orm.groups:UpfFamily" + +[project.entry-points."aiida.node"] +"data" = "aiida.orm.nodes.data.data:Data" +"process" = "aiida.orm.nodes.process.process:ProcessNode" +"process.calculation" = "aiida.orm.nodes.process.calculation.calculation:CalculationNode" +"process.calculation.calcfunction" = "aiida.orm.nodes.process.calculation.calcfunction:CalcFunctionNode" +"process.calculation.calcjob" = "aiida.orm.nodes.process.calculation.calcjob:CalcJobNode" +"process.workflow" = "aiida.orm.nodes.process.workflow.workflow:WorkflowNode" +"process.workflow.workchain" = "aiida.orm.nodes.process.workflow.workchain:WorkChainNode" +"process.workflow.workfunction" = 
"aiida.orm.nodes.process.workflow.workfunction:WorkFunctionNode" + +[project.entry-points."aiida.parsers"] +"core.arithmetic.add" = "aiida.parsers.plugins.arithmetic.add:ArithmeticAddParser" +"core.templatereplacer.doubler" = "aiida.parsers.plugins.templatereplacer.doubler:TemplatereplacerDoublerParser" + +[project.entry-points."aiida.schedulers"] +"core.direct" = "aiida.schedulers.plugins.direct:DirectScheduler" +"core.lsf" = "aiida.schedulers.plugins.lsf:LsfScheduler" +"core.pbspro" = "aiida.schedulers.plugins.pbspro:PbsproScheduler" +"core.sge" = "aiida.schedulers.plugins.sge:SgeScheduler" +"core.slurm" = "aiida.schedulers.plugins.slurm:SlurmScheduler" +"core.torque" = "aiida.schedulers.plugins.torque:TorqueScheduler" + +[project.entry-points."aiida.transports"] +"core.local" = "aiida.transports.plugins.local:LocalTransport" +"core.ssh" = "aiida.transports.plugins.ssh:SshTransport" + +[project.entry-points."aiida.tools.calculations"] + +[project.entry-points."aiida.tools.dbexporters"] + +[project.entry-points."aiida.tools.dbimporters"] +"core.cod" = "aiida.tools.dbimporters.plugins.cod:CodDbImporter" +"core.icsd" = "aiida.tools.dbimporters.plugins.icsd:IcsdDbImporter" +"core.materialsproject" = "aiida.tools.dbimporters.plugins.materialsproject:MaterialsProjectImporter" +"core.mpds" = "aiida.tools.dbimporters.plugins.mpds:MpdsDbImporter" +"core.mpod" = "aiida.tools.dbimporters.plugins.mpod:MpodDbImporter" +"core.nninc" = "aiida.tools.dbimporters.plugins.nninc:NnincDbImporter" +"core.oqmd" = "aiida.tools.dbimporters.plugins.oqmd:OqmdDbImporter" +"core.pcod" = "aiida.tools.dbimporters.plugins.pcod:PcodDbImporter" +"core.tcod" = "aiida.tools.dbimporters.plugins.tcod:TcodDbImporter" + +[project.entry-points."aiida.tools.data.orbitals"] +"core.orbital" = "aiida.tools.data.orbital.orbital:Orbital" +"core.realhydrogen" = "aiida.tools.data.orbital.realhydrogen:RealhydrogenOrbital" + +[project.entry-points."aiida.workflows"] +"core.arithmetic.multiply_add" = 
"aiida.workflows.arithmetic.multiply_add:MultiplyAddWorkChain" +"core.arithmetic.add_multiply" = "aiida.workflows.arithmetic.add_multiply:add_multiply" + +[tool.flit.module] +name = "aiida" + +[tool.flit.sdist] +exclude = [ + "docs/", + "tests/", +] [tool.pylint.master] load-plugins = ["pylint_aiida", "pylint_django"] diff --git a/requirements/README.md b/requirements/README.md index 253a416e09..678ba5aa1a 100644 --- a/requirements/README.md +++ b/requirements/README.md @@ -2,6 +2,6 @@ The `requirements-*.txt` files within this directory define the Python environment used for the *continuous integration tests* of this package. Note: For instructions on how to install the package for regular use, please see the documentation. -The consistency of the requirements defined here with the dependencies defined in the `setup.json` file is checked automatically as part of the continuous integration workflow. +The consistency of the requirements defined here with the dependencies defined in the `pyproject.toml` file is checked automatically as part of the continuous integration workflow. 
https://github.com/aiidateam/aiida-core/wiki/AiiDA-Dependency-Management diff --git a/setup.json b/setup.json deleted file mode 100644 index 8d2d65e2f6..0000000000 --- a/setup.json +++ /dev/null @@ -1,240 +0,0 @@ -{ - "name": "aiida-core", - "version": "2.0.0a1", - "url": "http://www.aiida.net/", - "license": "MIT License", - "author": "The AiiDA team", - "author_email": "developers@aiida.net", - "description": "AiiDA is a workflow manager for computational science with a strong focus on provenance, performance and extensibility.", - "include_package_data": true, - "python_requires": ">=3.8", - "classifiers": [ - "Development Status :: 5 - Production/Stable", - "Framework :: AiiDA", - "License :: OSI Approved :: MIT License", - "Operating System :: POSIX :: Linux", - "Operating System :: MacOS :: MacOS X", - "Programming Language :: Python", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Topic :: Scientific/Engineering" - ], - "install_requires": [ - "aldjemy~=2.4", - "alembic~=1.2", - "archive-path~=0.3.6", - "aio-pika~=6.6", - "circus~=0.17.1", - "click-config-file~=0.6.0", - "click-spinner~=0.1.8", - "click~=8.0,>=8.0.3", - "disk-objectstore~=0.6.0", - "django~=2.2", - "graphviz~=0.13", - "ipython~=7.20", - "jinja2~=3.0", - "jsonschema~=3.0", - "kiwipy[rmq]~=0.7.4", - "importlib-metadata~=4.3", - "numpy~=1.19", - "pamqp~=2.3", - "paramiko~=2.7,>=2.7.2", - "plumpy~=0.20.0", - "pgsu~=0.2.0", - "psutil~=5.6", - "psycopg2-binary~=2.8.3", - "python-dateutil~=2.8", - "pytz~=2021.1", - "pyyaml~=5.4", - "simplejson~=3.16", - "sqlalchemy~=1.4.22", - "tabulate~=0.8.5", - "typing-extensions; python_version < '3.8'", - "tqdm~=4.45", - "tzlocal~=2.0", - "upf_to_json~=0.9.2", - "wrapt~=1.11.1" - ], - "extras_require": { - "ssh_kerberos": [ - "gssapi~=1.6", - "pyasn1~=0.4.8" - ], - "rest": [ - "flask-cors~=3.0", - "flask-restful~=0.3.7", - "flask~=1.1", - "pyparsing~=2.4", - 
"python-memcached~=1.59", - "seekpath~=1.9,>=1.9.3" - ], - "docs": [ - "docutils==0.15.2", - "pygments~=2.5", - "pydata-sphinx-theme~=0.6.3", - "sphinx~=3.2.1", - "sphinxcontrib-details-directive~=0.1.0", - "sphinx-panels~=0.5.0", - "sphinx-copybutton~=0.3.0", - "sphinx-notfound-page~=0.5", - "sphinxext-rediraffe~=0.2.4", - "sphinx-sqlalchemy~=0.1.1" - ], - "atomic_tools": [ - "PyCifRW~=4.4", - "ase~=3.18", - "matplotlib~=3.3,>=3.3.4", - "pymatgen>=2019.7.2,<=2022.1.9,!=2019.9.7", - "pymysql~=0.9.3", - "seekpath~=1.9,>=1.9.3", - "spglib~=1.14" - ], - "notebook": [ - "jupyter-client~=6.1,<6.1.13", - "jupyter~=1.0", - "notebook~=6.1,>=6.1.5" - ], - "pre-commit": [ - "mypy==0.930", - "packaging==20.3", - "pre-commit~=2.2", - "pylint~=2.11.1", - "pylint-aiida~=0.1.1", - "pylint-django", - "sqlalchemy[mypy]~=1.4.29" - ], - "tests": [ - "aiida-export-migration-tests==0.9.0", - "pg8000~=1.13", - "pgtest~=1.3,>=1.3.1", - "pytest~=6.2", - "pytest-asyncio~=0.12,<0.17", - "pytest-timeout~=1.3", - "pytest-cov~=2.7,<2.11", - "pytest-rerunfailures~=9.1,>=9.1.1", - "pytest-benchmark~=3.2", - "pytest-regressions~=2.2", - "pympler~=0.9", - "coverage<5.0", - "sqlalchemy-diff~=0.1.3", - "sqlalchemy-utils~=0.37.2", - "sphinx~=3.2.1" - ], - "bpython": [ - "bpython~=0.18.0" - ] - }, - "entry_points": { - "console_scripts": [ - "verdi=aiida.cmdline.commands.cmd_verdi:verdi", - "runaiida=aiida.cmdline.commands.cmd_run:run" - ], - "aiida.calculations": [ - "core.transfer = aiida.calculations.transfer:TransferCalculation", - "core.arithmetic.add = aiida.calculations.arithmetic.add:ArithmeticAddCalculation", - "core.templatereplacer = aiida.calculations.templatereplacer:TemplatereplacerCalculation" - ], - "aiida.calculations.importers": [ - "core.arithmetic.add = aiida.calculations.importers.arithmetic.add:ArithmeticAddCalculationImporter" - ], - "aiida.cmdline.computer.configure": [ - "core.local = aiida.transports.plugins.local:CONFIGURE_LOCAL_CMD", - "core.ssh = 
aiida.transports.plugins.ssh:CONFIGURE_SSH_CMD" - ], - "aiida.cmdline.data": [ - "core.array = aiida.cmdline.commands.cmd_data.cmd_array:array", - "core.bands = aiida.cmdline.commands.cmd_data.cmd_bands:bands", - "core.cif = aiida.cmdline.commands.cmd_data.cmd_cif:cif", - "core.dict = aiida.cmdline.commands.cmd_data.cmd_dict:dictionary", - "core.remote = aiida.cmdline.commands.cmd_data.cmd_remote:remote", - "core.singlefile = aiida.cmdline.commands.cmd_data.cmd_singlefile:singlefile", - "core.structure = aiida.cmdline.commands.cmd_data.cmd_structure:structure", - "core.trajectory = aiida.cmdline.commands.cmd_data.cmd_trajectory:trajectory", - "core.upf = aiida.cmdline.commands.cmd_data.cmd_upf:upf" - ], - "aiida.cmdline.data.structure.import": [ - ], - "aiida.data": [ - "core.array = aiida.orm.nodes.data.array.array:ArrayData", - "core.array.bands = aiida.orm.nodes.data.array.bands:BandsData", - "core.array.kpoints = aiida.orm.nodes.data.array.kpoints:KpointsData", - "core.array.projection = aiida.orm.nodes.data.array.projection:ProjectionData", - "core.array.trajectory = aiida.orm.nodes.data.array.trajectory:TrajectoryData", - "core.array.xy = aiida.orm.nodes.data.array.xy:XyData", - "core.base = aiida.orm.nodes.data:BaseType", - "core.bool = aiida.orm.nodes.data.bool:Bool", - "core.cif = aiida.orm.nodes.data.cif:CifData", - "core.code = aiida.orm.nodes.data.code:Code", - "core.dict = aiida.orm.nodes.data.dict:Dict", - "core.enum = aiida.orm.nodes.data.enum:EnumData", - "core.float = aiida.orm.nodes.data.float:Float", - "core.folder = aiida.orm.nodes.data.folder:FolderData", - "core.int = aiida.orm.nodes.data.int:Int", - "core.list = aiida.orm.nodes.data.list:List", - "core.jsonable = aiida.orm.nodes.data.jsonable:JsonableData", - "core.numeric = aiida.orm.nodes.data.numeric:NumericType", - "core.orbital = aiida.orm.nodes.data.orbital:OrbitalData", - "core.remote = aiida.orm.nodes.data.remote.base:RemoteData", - "core.remote.stash = 
aiida.orm.nodes.data.remote.stash.base:RemoteStashData", - "core.remote.stash.folder = aiida.orm.nodes.data.remote.stash.folder:RemoteStashFolderData", - "core.singlefile = aiida.orm.nodes.data.singlefile:SinglefileData", - "core.str = aiida.orm.nodes.data.str:Str", - "core.structure = aiida.orm.nodes.data.structure:StructureData", - "core.upf = aiida.orm.nodes.data.upf:UpfData" - ], - "aiida.groups": [ - "core = aiida.orm.groups:Group", - "core.auto = aiida.orm.groups:AutoGroup", - "core.import = aiida.orm.groups:ImportGroup", - "core.upf = aiida.orm.groups:UpfFamily" - ], - "aiida.node": [ - "data = aiida.orm.nodes.data.data:Data", - "process = aiida.orm.nodes.process.process:ProcessNode", - "process.calculation = aiida.orm.nodes.process.calculation.calculation:CalculationNode", - "process.calculation.calcfunction = aiida.orm.nodes.process.calculation.calcfunction:CalcFunctionNode", - "process.calculation.calcjob = aiida.orm.nodes.process.calculation.calcjob:CalcJobNode", - "process.workflow = aiida.orm.nodes.process.workflow.workflow:WorkflowNode", - "process.workflow.workchain = aiida.orm.nodes.process.workflow.workchain:WorkChainNode", - "process.workflow.workfunction = aiida.orm.nodes.process.workflow.workfunction:WorkFunctionNode" - ], - "aiida.parsers": [ - "core.arithmetic.add = aiida.parsers.plugins.arithmetic.add:ArithmeticAddParser", - "core.templatereplacer.doubler = aiida.parsers.plugins.templatereplacer.doubler:TemplatereplacerDoublerParser" - ], - "aiida.schedulers": [ - "core.direct = aiida.schedulers.plugins.direct:DirectScheduler", - "core.lsf = aiida.schedulers.plugins.lsf:LsfScheduler", - "core.pbspro = aiida.schedulers.plugins.pbspro:PbsproScheduler", - "core.sge = aiida.schedulers.plugins.sge:SgeScheduler", - "core.slurm = aiida.schedulers.plugins.slurm:SlurmScheduler", - "core.torque = aiida.schedulers.plugins.torque:TorqueScheduler" - ], - "aiida.transports": [ - "core.local = aiida.transports.plugins.local:LocalTransport", - "core.ssh = 
aiida.transports.plugins.ssh:SshTransport" - ], - "aiida.tools.calculations": [], - "aiida.tools.dbexporters": [], - "aiida.tools.dbimporters": [ - "core.cod = aiida.tools.dbimporters.plugins.cod:CodDbImporter", - "core.icsd = aiida.tools.dbimporters.plugins.icsd:IcsdDbImporter", - "core.materialsproject = aiida.tools.dbimporters.plugins.materialsproject:MaterialsProjectImporter", - "core.mpds = aiida.tools.dbimporters.plugins.mpds:MpdsDbImporter", - "core.mpod = aiida.tools.dbimporters.plugins.mpod:MpodDbImporter", - "core.nninc = aiida.tools.dbimporters.plugins.nninc:NnincDbImporter", - "core.oqmd = aiida.tools.dbimporters.plugins.oqmd:OqmdDbImporter", - "core.pcod = aiida.tools.dbimporters.plugins.pcod:PcodDbImporter", - "core.tcod = aiida.tools.dbimporters.plugins.tcod:TcodDbImporter" - ], - "aiida.tools.data.orbitals": [ - "core.orbital = aiida.tools.data.orbital.orbital:Orbital", - "core.realhydrogen = aiida.tools.data.orbital.realhydrogen:RealhydrogenOrbital" - ], - "aiida.workflows": [ - "core.arithmetic.multiply_add = aiida.workflows.arithmetic.multiply_add:MultiplyAddWorkChain", - "core.arithmetic.add_multiply = aiida.workflows.arithmetic.add_multiply:add_multiply" - ] - } -} diff --git a/setup.py b/setup.py deleted file mode 100644 index 1597683b92..0000000000 --- a/setup.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=wrong-import-order -"""Setup script for aiida-core package.""" -import json -import os - -try: - import fastentrypoints # pylint: disable=unused-import -except ImportError: - # This should only occur when building the package, i.e. when - # executing 'python setup.py sdist' or 'python setup.py bdist_wheel' - pass -from setuptools import find_packages, setup - -if __name__ == '__main__': - THIS_FOLDER = os.path.split(os.path.abspath(__file__))[0] - - with open(os.path.join(THIS_FOLDER, 'setup.json'), 'r', encoding='utf8') as info: - SETUP_JSON = json.load(info) - - with open(os.path.join(THIS_FOLDER, 'README.md'), 'r', encoding='utf8') as readme: - README = readme.read() - - EXTRAS_REQUIRE = SETUP_JSON['extras_require'] - EXTRAS_REQUIRE['tests'] = set(EXTRAS_REQUIRE['tests'] + EXTRAS_REQUIRE['rest'] + EXTRAS_REQUIRE['atomic_tools']) - EXTRAS_REQUIRE['docs'] = set(EXTRAS_REQUIRE['docs'] + EXTRAS_REQUIRE['rest'] + EXTRAS_REQUIRE['atomic_tools']) - EXTRAS_REQUIRE['all'] = list({item for sublist in EXTRAS_REQUIRE.values() for item in sublist if item != 'bpython'}) - - setup( - packages=find_packages(include=['aiida', 'aiida.*']), - long_description=README, - long_description_content_type='text/markdown', - **SETUP_JSON - ) diff --git a/utils/dependency_management.py b/utils/dependency_management.py index b1760633c4..55e3ced081 100755 --- a/utils/dependency_management.py +++ b/utils/dependency_management.py @@ -10,7 +10,6 @@ ########################################################################### """Utility CLI to manage dependencies for aiida-core.""" from collections import OrderedDict, defaultdict -import json import os from pathlib import Path import re @@ 
-22,6 +21,7 @@ from packaging.version import parse from pkg_resources import Requirement, parse_requirements import requests +import tomli import yaml ROOT = Path(__file__).resolve().parent.parent # repository root @@ -39,15 +39,15 @@ class DependencySpecificationError(click.ClickException): """Indicates an issue in a dependency specification.""" -def _load_setup_cfg(): - """Load the setup configuration from the 'setup.json' file.""" +def _load_pyproject(): + """Load the setup configuration from the 'pyproject.toml' file.""" try: - with open(ROOT / 'setup.json', encoding='utf8') as setup_json_file: - return json.load(setup_json_file) - except json.decoder.JSONDecodeError as error: # pylint: disable=no-member - raise DependencySpecificationError(f"Error while parsing 'setup.json' file: {error}") + with open(ROOT / 'pyproject.toml', 'rb') as handle: + return tomli.load(handle) + except tomli.TOMLDecodeError as error: # pylint: disable=no-member + raise DependencySpecificationError(f"Error while parsing 'pyproject.toml' file: {error}") except FileNotFoundError: - raise DependencySpecificationError("The 'setup.json' file is missing!") + raise DependencySpecificationError("The 'pyproject.toml' file is missing!") def _load_environment_yml(): @@ -80,19 +80,19 @@ def _setuptools_to_conda(req): return Requirement.parse(str(req)) -def _find_linenos_of_requirements_in_setup_json(requirements): - """Determine the line numbers of requirements specified in 'setup.json'. +def _find_linenos_of_requirements_in_pyproject(requirements): + """Determine the line numbers of requirements specified in 'pyproject.toml'. Returns a dict that maps a requirement, e.g., `numpy~=1.15.0` to the - line numbers at which said requirement is defined within the 'setup.json' + line numbers at which said requirement is defined within the 'pyproject.toml' file. 
""" linenos = defaultdict(list) - with open(ROOT / 'setup.json', encoding='utf8') as setup_json_file: + with open(ROOT / 'pyproject.toml', encoding='utf8') as setup_json_file: lines = list(setup_json_file) - # Determine the lines that correspond to affected requirements in setup.json. + # Determine the lines that correspond to affected requirements in pyproject.toml. for requirement in requirements: for lineno, line in enumerate(lines): if str(requirement) in line: @@ -134,9 +134,9 @@ def generate_environment_yml(): Dumper=yaml.SafeDumper ) - # Read the requirements from 'setup.json' - setup_cfg = _load_setup_cfg() - install_requirements = [Requirement.parse(r) for r in setup_cfg['install_requires']] + # Read the requirements from 'pyproject.toml' + pyproject = _load_pyproject() + install_requirements = [Requirement.parse(r) for r in pyproject['project']['dependencies']] # python version cannot be overriden from outside environment.yml # (even if it is not specified at all in environment.yml) @@ -169,12 +169,12 @@ def generate_all(ctx): @cli.command('validate-environment-yml', help="Validate 'environment.yml'.") def validate_environment_yml(): # pylint: disable=too-many-branches - """Validate that 'environment.yml' is consistent with 'setup.json'.""" + """Validate that 'environment.yml' is consistent with 'pyproject.toml'.""" - # Read the requirements from 'setup.json' and 'environment.yml'. - setup_cfg = _load_setup_cfg() - install_requirements = [Requirement.parse(r) for r in setup_cfg['install_requires']] - python_requires = Requirement.parse('python' + setup_cfg['python_requires']) + # Read the requirements from 'pyproject.toml' and 'environment.yml'. 
+ pyproject = _load_pyproject() + install_requirements = [Requirement.parse(r) for r in pyproject['project']['dependencies']] + python_requires = Requirement.parse('python' + pyproject['project']['requires-python']) environment_yml = _load_environment_yml() try: @@ -199,24 +199,26 @@ def validate_environment_yml(): # pylint: disable=too-many-branches else: # Failed to find Python dependency specification raise DependencySpecificationError("Did not find specification of Python version in 'environment.yml'.") - # The Python version specified in 'setup.json' should be listed as trove classifiers. + # The Python version specified in 'pyproject.toml' should be listed as trove classifiers. for spec in conda_python_dependency.specifier: expected_classifier = 'Programming Language :: Python :: ' + spec.version - if expected_classifier not in setup_cfg['classifiers']: - raise DependencySpecificationError(f"Trove classifier '{expected_classifier}' missing from 'setup.json'.") + if expected_classifier not in pyproject['project']['classifiers']: + raise DependencySpecificationError( + f"Trove classifier '{expected_classifier}' missing from 'pyproject.toml'." + ) - # The Python version should be specified as supported in 'setup.json'. + # The Python version should be specified as supported in 'pyproject.toml'. if not any(spec.version >= other_spec.version for other_spec in python_requires.specifier): raise DependencySpecificationError( f"Required Python version {spec.version} from 'environment.yaml' is not consistent with " + - "required version in 'setup.json'." + "required version in 'pyproject.toml'." ) break else: raise DependencySpecificationError(f"Missing specifier: '{conda_python_dependency}'.") - # Check that all requirements specified in the setup.json file are found in the + # Check that all requirements specified in the pyproject.toml file are found in the # conda environment specification. 
for req in install_requirements: if any(re.match(ignore, str(req)) for ignore in CONDA_IGNORE): @@ -232,7 +234,7 @@ def validate_environment_yml(): # pylint: disable=too-many-branches if conda_dependencies: raise DependencySpecificationError( "The 'environment.yml' file contains dependencies that are missing " - "in 'setup.json':\n- {}".format('\n- '.join(map(str, conda_dependencies))) + "in 'pyproject.toml':\n- {}".format('\n- '.join(map(str, conda_dependencies))) ) click.secho('Conda dependency specification is consistent.', fg='green') @@ -246,7 +248,7 @@ def validate_all(ctx): Validates that the specification of requirements/dependencies is consistent across the following files: - - setup.json + - pyproject.toml - environment.yml """ @@ -268,7 +270,7 @@ def check_requirements(extras, github_annotate): # pylint disable: too-many-loc """Check the 'requirements/*.txt' files. Checks that the environments specified in the requirements files - match all the dependencies specified in 'setup.json. + match all the dependencies specified in 'pyproject.toml'. The arguments allow to specify which 'extra' requirements to expect. Use 'DEFAULT' to select 'atomic_tools', 'docs', 'notebook', 'rest', and 'tests'. 
@@ -278,11 +280,11 @@ def check_requirements(extras, github_annotate): # pylint disable: too-many-loc if len(extras) == 1 and extras[0] == 'DEFAULT': extras = ['atomic_tools', 'docs', 'notebook', 'rest', 'tests'] - # Read the requirements from 'setup.json' - setup_cfg = _load_setup_cfg() - install_requires = setup_cfg['install_requires'] + # Read the requirements from 'pyproject.toml' + pyproject = _load_pyproject() + install_requires = pyproject['project']['dependencies'] for extra in extras: - install_requires.extend(setup_cfg['extras_require'][extra]) + install_requires.extend(pyproject['project']['optional-dependencies'][extra]) install_requires = set(parse_requirements(install_requires)) not_installed = defaultdict(list) @@ -301,12 +303,12 @@ def check_requirements(extras, github_annotate): # pylint disable: too-many-loc not_installed[dependency].append(fn_req) if any(not_installed.values()): - setup_json_linenos = _find_linenos_of_requirements_in_setup_json(not_installed) + setup_json_linenos = _find_linenos_of_requirements_in_pyproject(not_installed) # Format error message to be presented to user. - error_msg = ["The requirements/ files are missing dependencies specified in the 'setup.json' file.", ''] + error_msg = ["The requirements/ files are missing dependencies specified in the 'pyproject.toml' file.", ''] for dependency, fn_reqs in not_installed.items(): - src = 'setup.json:' + ','.join(str(lineno + 1) for lineno in setup_json_linenos[dependency]) + src = 'pyproject.toml:' + ','.join(str(lineno + 1) for lineno in setup_json_linenos[dependency]) error_msg.append(f'{src}: No match for dependency `{dependency}` in:') for fn_req in sorted(fn_reqs): error_msg.append(f' - {fn_req.relative_to(ROOT)}') @@ -316,18 +318,18 @@ def check_requirements(extras, github_annotate): # pylint disable: too-many-loc print('::set-output name=error::' + '%0A'.join(error_msg)) if GITHUB_ACTIONS and github_annotate: - # Annotate the setup.json file with specific warnings. 
+ # Annotate the pyproject.toml file with specific warnings. for dependency, fn_reqs in not_installed.items(): for lineno in setup_json_linenos[dependency]: print( - f'::warning file=setup.json,line={lineno+1}::' + f'::warning file=pyproject.toml,line={lineno+1}::' f"No match for dependency '{dependency}' in: " + ','.join(str(fn_req.relative_to(ROOT)) for fn_req in fn_reqs) ) raise DependencySpecificationError('\n'.join(error_msg)) - click.secho("Requirements files appear to be in sync with specifications in 'setup.json'.", fg='green') + click.secho("Requirements files appear to be in sync with specifications in 'pyproject.toml'.", fg='green') @cli.command() @@ -343,15 +345,15 @@ def show_requirements(extras, fmt): This will show all reqiurements including *all* extras in Pipfile format. """ - # Read the requirements from 'setup.json' - setup_cfg = _load_setup_cfg() + # Read the requirements from 'pyproject.toml' + pyproject = _load_pyproject() if 'all' in extras: - extras = list(setup_cfg['extras_require']) + extras = list(pyproject['project']['optional-dependencies']) - to_install = {Requirement.parse(r) for r in setup_cfg['install_requires']} + to_install = {Requirement.parse(r) for r in pyproject['project']['dependencies']} for key in extras: - to_install.update(Requirement.parse(r) for r in setup_cfg['extras_require'][key]) + to_install.update(Requirement.parse(r) for r in pyproject['project']['optional-dependencies'][key]) if fmt == 'pip': click.echo('\n'.join(sorted(map(str, to_install)))) @@ -373,12 +375,12 @@ def pip_install_extras(extras): This will install *only* the extra the requirements for docs, but without triggering the installation of the main installations requirements of the aiida-core package. 
""" - # Read the requirements from 'setup.json' - setup_cfg = _load_setup_cfg() + # Read the requirements from 'pyproject.toml'' + pyproject = _load_pyproject() to_install = set() for key in extras: - to_install.update(Requirement.parse(r) for r in setup_cfg['extras_require'][key]) + to_install.update(Requirement.parse(r) for r in pyproject['project']['optional-dependencies'][key]) cmd = [sys.executable, '-m', 'pip', 'install'] + [str(r) for r in to_install] subprocess.run(cmd, check=True) @@ -401,15 +403,12 @@ def identify_outdated(extras, pre_releases): specification must be loosened. """ - # Read the requirements from 'setup.json' - setup_cfg = _load_setup_cfg() - - if 'all' in extras: - extras = list(setup_cfg['extras_require']) + # Read the requirements from 'pyproject.toml'' + pyproject = _load_pyproject() - to_install = {Requirement.parse(r) for r in setup_cfg['install_requires']} + to_install = {Requirement.parse(r) for r in pyproject['project']['dependencies']} for key in extras: - to_install.update(Requirement.parse(r) for r in setup_cfg['extras_require'][key]) + to_install.update(Requirement.parse(r) for r in pyproject['project']['optional-dependencies'][key]) def get_package_data(name): req = requests.get(f'https://pypi.python.org/pypi/{name}/json') diff --git a/utils/requirements.txt b/utils/requirements.txt index 45058c6135..eb8351b949 100644 --- a/utils/requirements.txt +++ b/utils/requirements.txt @@ -2,3 +2,4 @@ click==7.1.2 packaging==20.3 pyyaml==5.4.1 requests==2.25.1 +tomli==2.0.0 diff --git a/utils/validate_consistency.py b/utils/validate_consistency.py index 42d3ebb808..6b8d9db572 100644 --- a/utils/validate_consistency.py +++ b/utils/validate_consistency.py @@ -10,59 +10,18 @@ ########################################################################### """Validate consistency of versions and dependencies. 
-Validates consistency of setup.json and +Validates consistency of + * pyproject.toml * environment.yml - * version in aiida/__init__.py """ -import collections -import json import os -import sys import click -FILENAME_TOML = 'pyproject.toml' -FILENAME_SETUP_JSON = 'setup.json' SCRIPT_PATH = os.path.split(os.path.realpath(__file__))[0] ROOT_DIR = os.path.join(SCRIPT_PATH, os.pardir) -FILEPATH_SETUP_JSON = os.path.join(ROOT_DIR, FILENAME_SETUP_JSON) -FILEPATH_TOML = os.path.join(ROOT_DIR, FILENAME_TOML) - - -def get_setup_json(): - """Return the `setup.json` as a python dictionary """ - with open(FILEPATH_SETUP_JSON, 'r', encoding='utf8') as fil: - return json.load(fil, object_pairs_hook=collections.OrderedDict) - - -def write_setup_json(data): - """Write the contents of `data` to the `setup.json`. - - If an exception is encountered during writing, the old content is restored. - - :param data: the dictionary to write to the `setup.json` - """ - backup = get_setup_json() - - try: - dump_setup_json(data) - except Exception: # pylint: disable=broad-except - dump_setup_json(backup) - - -def dump_setup_json(data): - """Write the contents of `data` to the `setup.json`. - - .. warning:: If the writing of the file excepts, the current file will be overwritten and will be left in an - incomplete state. To write with a backup safety use the `write_setup_json` function instead. 
- - :param data: the dictionary to write to the `setup.json` - """ - with open(FILEPATH_SETUP_JSON, 'w', encoding='utf8') as handle: - # Write with indentation of four spaces and explicitly define separators to not have spaces at end of lines - return json.dump(data, handle, indent=4, separators=(',', ': ')) def determine_block_positions(lines, block_start_marker, block_end_marker): @@ -185,33 +144,5 @@ def validate_verdi_documentation(): replace_block_in_file(filepath_verdi_commands, commands_block_start_marker, commands_block_end_marker, block) -@cli.command('version') -def validate_version(): - """Check that version numbers match. - - Check version number in setup.json and aiida-core/__init__.py and make sure they match. - """ - import pkgutil - - # Get version from python package - loaders = [ - module_loader for (module_loader, name, ispkg) in pkgutil.iter_modules(path=[ROOT_DIR]) - if name == 'aiida' and ispkg - ] - version = loaders[0].find_module('aiida').load_module('aiida').__version__ - - setup_content = get_setup_json() - if version != setup_content['version']: - click.echo('Version number mismatch detected:') - click.echo(f"Version number in '{FILENAME_SETUP_JSON}': {setup_content['version']}") - click.echo(f"Version number in 'aiida/__init__.py': {version}") - click.echo(f"Updating version in '{FILENAME_SETUP_JSON}' to: {version}") - - setup_content['version'] = version - write_setup_json(setup_content) - - sys.exit(1) - - if __name__ == '__main__': cli() # pylint: disable=no-value-for-parameter