diff --git a/.github/workflows/build_documentation.yml b/.github/workflows/build_documentation.yml deleted file mode 100644 index a0e60ecf30a..00000000000 --- a/.github/workflows/build_documentation.yml +++ /dev/null @@ -1,111 +0,0 @@ -name: Documentation Build - -on: [pull_request, workflow_dispatch] - -env: - # Following env vars when changed will "reset" the mentioned cache, - # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... - # You should go up in number, if you go down (or repeat a previous value) - # you might end up reusing a previous cache if it haven't been deleted already. - # It applies 7 days retention policy by default. - RESET_EXAMPLES_CACHE: 3 - RESET_DOC_BUILD_CACHE: 3 - RESET_AUTOSUMMARY_CACHE: 3 - - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - docs-style: - name: "Check documentation style" - runs-on: ubuntu-latest - steps: - - name: "Check documentation style" - uses: ansys/actions/doc-style@v5 - with: - token: ${{ secrets.GITHUB_TOKEN }} - vale-config: "doc/.vale.ini" - vale-version: "2.29.6" - - docs_build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Update pip - run: | - pip install --upgrade pip - - - name: Install pyaedt - run: | - pip install .[doc] - - - name: Verify pyaedt can be imported - run: python -c "import pyaedt" - - - name: Retrieve PyAEDT version - id: version - run: | - echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT - echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" - - # - name: Cache docs build directory - # uses: actions/cache@v3 - # with: - # path: doc/build - # key: doc-build-v${{ env.RESET_DOC_BUILD_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }}-${{ github.sha }} - # restore-keys: | - # doc-build-v${{ 
env.RESET_DOC_BUILD_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }} - # - name: Cache autosummary - # uses: actions/cache@v3 - # with: - # path: doc/source/**/_autosummary/*.rst - # key: autosummary-v${{ env.RESET_AUTOSUMMARY_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }}-${{ github.sha }} - # restore-keys: | - # autosummary-v${{ env.RESET_AUTOSUMMARY_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }} - - - name: Install doc build requirements - run: | - sudo apt update - sudo apt install graphviz texlive-latex-extra latexmk texlive-xetex texlive-fonts-extra -y - - # run doc build, without creating the examples directory - # note that we have to add the examples file here since it won't - # be created as gallery is disabled on linux. - - name: Documentation Build (HTML) - run: | - make -C doc clean - mkdir doc/source/examples -p - echo $'Examples\n========' > doc/source/examples/index.rst - make -C doc phtml-no-examples SPHINXOPTS="-j auto -w build_errors.txt -N" - - # Verify that sphinx generates no warnings - - name: Check for warnings - run: | - python doc/print_errors.py - - - name: Upload Documentation - uses: actions/upload-artifact@v3 - with: - name: Documentation - path: doc/_build/html - retention-days: 7 - - - name: Documentation Build (PDF) - run: | - make -C doc pdf-no-examples - - - name: Upload documentation PDF artifact - uses: actions/upload-artifact@v3 - with: - name: Documentation-pdf - path: doc/_build/latex/*.pdf - retention-days: 7 diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml new file mode 100644 index 00000000000..e6c7f71fcf3 --- /dev/null +++ b/.github/workflows/ci_cd.yml @@ -0,0 +1,573 @@ +name: GitHub CI CD +on: + pull_request: + workflow_dispatch: + push: + tags: + - "*" + branches: + - main + +env: + ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} + MAIN_PYTHON_VERSION: '3.10' + PACKAGE_NAME: 'PyAEDT' + DOCUMENTATION_CNAME: 'aedt.docs.pyansys.com' + MEILISEARCH_API_KEY: ${{ 
secrets.MEILISEARCH_API_KEY }} + MEILISEARCH_HOST_URL: ${{ vars.MEILISEARCH_HOST_URL }} + MEILISEARCH_PUBLIC_API_KEY: ${{ secrets.MEILISEARCH_PUBLIC_API_KEY }} + ON_CI: True + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + + pr-title: + if: github.event_name == 'pull_request' + name: Check the title of the pull request + runs-on: ubuntu-latest + steps: + - name: Check commit name + uses: ansys/actions/commit-style@v6 + with: + token: ${{ secrets.GITHUB_TOKEN }} + use-upper-case: true + + # TODO: Update to ansys/actions/doc-style@v6 + doc-style: + name: Documentation style check + runs-on: ubuntu-latest + steps: + - name: Check documentation style + uses: ansys/actions/doc-style@v4 + with: + token: ${{ secrets.GITHUB_TOKEN }} + vale-config: "doc/.vale.ini" + vale-version: "2.29.6" + + smoke-tests: + name: Build wheelhouse and smoke tests + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest] + python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] + steps: + - name: Build wheelhouse and perform smoke test + uses: ansys/actions/build-wheelhouse@v4 + with: + library-name: ${{ env.PACKAGE_NAME }} + operating-system: ${{ matrix.os }} + python-version: ${{ matrix.python-version }} + target: 'all' + + - name: Import python package + run: | + python -c "import pyaedt; from pyaedt import __version__" + + # TODO: Update to ansys/actions/doc-build@v6 once we remove examples + doc-build: + name: Documentation build without examples + runs-on: ubuntu-latest + needs: [doc-style] + steps: + - name: Install Git and checkout project + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Update pip + run: | + pip install --upgrade pip + + - name: Install pyaedt and documentation dependencies + run: | + pip install .[doc-no-examples] + + - name: Retrieve PyAEDT version + id: version 
+ run: | + echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT + echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" + + - name: Install doc build requirements + run: | + sudo apt update + sudo apt install graphviz texlive-latex-extra latexmk texlive-xetex texlive-fonts-extra -y + + # TODO: Update this step once pyaedt-examples is ready + - name: Build HTML documentation without examples + run: | + make -C doc clean + make -C doc html-no-examples + + # Verify that sphinx generates no warnings + - name: Check for warnings + run: | + python doc/print_errors.py + + - name: Upload HTML documentation without examples artifact + uses: actions/upload-artifact@v3 + with: + name: documentation-no-examples-html + path: doc/_build/html + retention-days: 7 + + - name: Build PDF documentation without examples + run: | + make -C doc pdf-no-examples + + - name: Upload PDF documentation without examples artifact + uses: actions/upload-artifact@v3 + with: + name: documentation-no-examples-pdf + path: doc/_build/latex/PyAEDT-Documentation-*.pdf + retention-days: 7 + +# # ================================================================================================= +# # vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv RUNNING ON SELF-HOSTED RUNNER vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv +# # ================================================================================================= + + doc-build-with-examples: + name: Documentation build with examples + if: github.event_name == 'push' && contains(github.ref, 'refs/tags') + runs-on: [ self-hosted, Windows, pyaedt ] + needs: [doc-style] + timeout-minutes: 720 + steps: + - name: Install Git and checkout project + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Create virtual environment + run: | + python -m venv .venv + .venv\Scripts\Activate.ps1 + python -m 
pip install pip -U + python -m pip install wheel setuptools -U + python -c "import sys; print(sys.executable)" + + - name: Install pyaedt and documentation dependencies + run: | + .venv\Scripts\Activate.ps1 + pip install .[doc] + + - name: Retrieve PyAEDT version + id: version + run: | + .venv\Scripts\Activate.ps1 + echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT + echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" + + - name: Install CI dependencies (e.g. vtk-osmesa) + run: | + .venv\Scripts\Activate.ps1 + # Uninstall conflicting dependencies + pip uninstall --yes vtk + pip install --extra-index-url https://wheels.vtk.org vtk-osmesa==9.2.20230527.dev0 + + # TODO: Update this step once pyaedt-examples is ready + # NOTE: Use environment variable to keep the doctree and avoid redundant build for PDF pages + - name: Build HTML documentation with examples + env: + SPHINXBUILD_KEEP_DOCTREEDIR: "1" + run: | + .venv\Scripts\Activate.ps1 + .\doc\make.bat clean + .\doc\make.bat html + + # TODO: Keeping this commented as reminder of https://github.com/ansys/pyaedt/issues/4296 + # # Verify that sphinx generates no warnings + # - name: Check for warnings + # run: | + # .venv\Scripts\Activate.ps1 + # python doc/print_errors.py + + # Use environment variable to remove the doctree after the build of PDF pages + - name: Build PDF documentation with examples + env: + SPHINXBUILD_KEEP_DOCTREEDIR: "0" + run: | + .venv\Scripts\Activate.ps1 + .\doc\make.bat pdf + + - name: Add assets to HTML docs + run: | + zip -r documentation-html.zip ./doc/_build/html + mv documentation-html.zip ./doc/_build/html/_static/assets/download/ + cp doc/_build/latex/PyAEDT-Documentation-*.pdf ./doc/_build/html/_static/assets/download/pyaedt.pdf + + - name: Upload HTML documentation with examples artifact + uses: actions/upload-artifact@v3 + with: + name: documentation-html + path: doc/_build/html + 
retention-days: 7 + + - name: Upload PDF documentation without examples artifact + uses: actions/upload-artifact@v3 + with: + name: documentation-pdf + path: doc/_build/latex/PyAEDT-Documentation-*.pdf + retention-days: 7 + +# # ================================================================================================= +# # vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv RUNNING ON SELF-HOSTED RUNNER vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv +# # ================================================================================================= + + test-solvers-windows: + name: Testing solvers and coverage (Windows) + needs: [smoke-tests] + runs-on: [ self-hosted, Windows, pyaedt ] + steps: + - name: Install Git and checkout project + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Create virtual environment + run: | + python -m venv .venv + .venv\Scripts\Activate.ps1 + python -m pip install pip -U + python -m pip install wheel setuptools -U + python -c "import sys; print(sys.executable)" + + - name: Install pyaedt and tests dependencies + run: | + .venv\Scripts\Activate.ps1 + pip install .[tests] + pip install pytest-azurepipelines + + - name: Install CI dependencies (e.g. 
vtk-osmesa) + run: | + .venv\Scripts\Activate.ps1 + # Uninstall conflicting dependencies + pip uninstall --yes vtk + pip install --extra-index-url https://wheels.vtk.org vtk-osmesa==9.2.20230527.dev0 + + - name: Run tests on _unittest_solvers + env: + PYTHONMALLOC: malloc + run: | + .venv\Scripts\Activate.ps1 + pytest --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest_solvers + + - uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + name: codecov-system-solver-tests + file: ./coverage.xml + flags: system,solver + + - name: Upload pytest test results + uses: actions/upload-artifact@v3 + with: + name: pytest-solver-results + path: junit/test-results.xml + if: ${{ always() }} + +# # ================================================================================================= +# # vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv RUNNING ON SELF-HOSTED RUNNER vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv +# # ================================================================================================= + + # TODO: Si if we can use ansys/actions + test-solvers-linux: + name: Testing solvers and coverage (Linux) + needs: [smoke-tests] + runs-on: [ self-hosted, Linux, pyaedt ] + env: + ANSYSEM_ROOT241: '/opt/AnsysEM/v241/Linux64' + ANS_NODEPCHECK: '1' + steps: + - name: Install Git and checkout project + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Create virtual environment + run: | + export LD_LIBRARY_PATH=${{ env.ANSYSEM_ROOT241 }}/common/mono/Linux64/lib64:${{ env.ANSYSEM_ROOT241 }}/Delcross:$LD_LIBRARY_PATH + python -m venv .venv + source .venv/bin/activate + python -m pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org pip -U + python -m pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org wheel 
setuptools -U + python -c "import sys; print(sys.executable)" + + - name: Install pyaedt and tests dependencies + run: | + export LD_LIBRARY_PATH=${{ env.ANSYSEM_ROOT241 }}/common/mono/Linux64/lib64:${{ env.ANSYSEM_ROOT241 }}/Delcross:$LD_LIBRARY_PATH + source .venv/bin/activate + pip install .[tests] + pip install pytest-azurepipelines + + - name: Run tests on _unittest_solvers + run: | + export LD_LIBRARY_PATH=${{ env.ANSYSEM_ROOT241 }}/common/mono/Linux64/lib64:${{ env.ANSYSEM_ROOT241 }}/Delcross:$LD_LIBRARY_PATH + source .venv/bin/activate + pytest --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest_solvers + + - uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + name: codecov-system-solver-tests + file: ./coverage.xml + flags: system,solver + + - name: Upload pytest test results + uses: actions/upload-artifact@v3 + with: + name: pytest-solver-results + path: junit/test-results.xml + if: ${{ always() }} + +# # ================================================================================================= +# # vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv RUNNING ON SELF-HOSTED RUNNER vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv +# # ================================================================================================= + + test-windows: + name: Testing and coverage (Windows) + needs: [smoke-tests] + runs-on: [ self-hosted, Windows, pyaedt ] + steps: + - name: Install Git and checkout project + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Create virtual environment + run: | + python -m venv .venv + .venv\Scripts\Activate.ps1 + python -m pip install pip -U + python -m pip install wheel setuptools -U + python -c "import sys; print(sys.executable)" + + - name: Install pyaedt and tests dependencies + run: | + .venv\Scripts\Activate.ps1 + pip install .[tests] + pip install 
pytest-azurepipelines + + - name: Install CI dependencies (e.g. vtk-osmesa) + run: | + .venv\Scripts\Activate.ps1 + # Uninstall conflicting dependencies + pip uninstall --yes vtk + pip install --extra-index-url https://wheels.vtk.org vtk-osmesa==9.2.20230527.dev0 + + - name: Run tests on _unittest + uses: nick-fields/retry@v3 + env: + PYTHONMALLOC: malloc + with: + max_attempts: 2 + retry_on: error + timeout_minutes: 50 + command: | + .venv\Scripts\Activate.ps1 + pytest -n 4 --dist loadfile --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest + + - uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + name: codecov-system-tests + file: ./coverage.xml + flags: system + + - name: Upload pytest test results + uses: actions/upload-artifact@v3 + with: + name: pytest-results + path: junit/test-results.xml + if: ${{ always() }} + +# # ================================================================================================= +# # vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv RUNNING ON SELF-HOSTED RUNNER vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv +# # ================================================================================================= + + # TODO: Si if we can use ansys/actions + test-linux: + name: Testing and coverage (Linux) + needs: [smoke-tests] + runs-on: [ self-hosted, Linux, pyaedt ] + env: + ANSYSEM_ROOT241: '/opt/AnsysEM/v241/Linux64' + ANS_NODEPCHECK: '1' + steps: + - name: Install Git and checkout project + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Create virtual environment + run: | + export LD_LIBRARY_PATH=${{ env.ANSYSEM_ROOT241 }}/common/mono/Linux64/lib64:${{ env.ANSYSEM_ROOT241 }}/Delcross:$LD_LIBRARY_PATH + python -m venv .venv + source .venv/bin/activate + python -m pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org 
pip -U + python -m pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org wheel setuptools -U + python -c "import sys; print(sys.executable)" + + - name: Install pyaedt and tests dependencies + run: | + export LD_LIBRARY_PATH=${{ env.ANSYSEM_ROOT241 }}/common/mono/Linux64/lib64:${{ env.ANSYSEM_ROOT241 }}/Delcross:$LD_LIBRARY_PATH + source .venv/bin/activate + pip install .[tests] + pip install pytest-azurepipelines + + - name: Install CI dependencies (e.g. vtk-osmesa) + run: | + source .venv/bin/activate + # Uninstall conflicting dependencies + pip uninstall --yes vtk + pip install --extra-index-url https://wheels.vtk.org vtk-osmesa==9.2.20230527.dev0 + + - name: Run tests on _unittest + uses: nick-fields/retry@v3 + with: + max_attempts: 2 + retry_on: error + timeout_minutes: 50 + command: | + export LD_LIBRARY_PATH=${{ env.ANSYSEM_ROOT241 }}/common/mono/Linux64/lib64:${{ env.ANSYSEM_ROOT241 }}/Delcross:$LD_LIBRARY_PATH + source .venv/bin/activate + pytest -n 4 --dist loadfile --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest + + - uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + name: codecov-system-solver-tests + file: ./coverage.xml + flags: system,solver + + - name: Upload pytest test results + uses: actions/upload-artifact@v3 + with: + name: pytest-solver-results + path: junit/test-results.xml + if: ${{ always() }} + +# # ================================================================================================= +# # vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv RUNNING ON SELF-HOSTED RUNNER vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv +# # ================================================================================================= + + test-ironpython-windows: + name: Testing IronPython and coverage (Windows) + needs: [smoke-tests] + runs-on: [ self-hosted, Windows, pyaedt ] + steps: + - uses: actions/checkout@v4 + + - name: Run Ironpython 
tests + timeout-minutes: 5 + run: | + $processA = start-process 'cmd' -ArgumentList '/c .\_unittest_ironpython\run_unittests_batchmode.cmd' -PassThru + $processA.WaitForExit() + + - name: Get log content + run: | + get-content .\_unittest_ironpython\pyaedt_unit_test_ironpython.log + + - name: Check for errors + run: | + $test_errors_failures = Select-String -Path .\_unittest_ironpython\pyaedt_unit_test_ironpython.log -Pattern "TextTestResult errors=" + if ($test_errors_failures -ne $null) + { + exit 1 + } + + package: + name: Package library + needs: [test-windows, test-solvers-windows, test-ironpython-windows, test-linux, test-solvers-linux, doc-build] + runs-on: ubuntu-latest + steps: + - name: Build library source and wheel artifacts + uses: ansys/actions/build-library@v4 + with: + library-name: ${{ env.PACKAGE_NAME }} + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + # TODO: See if we can fix the PDF issue and leverage classic ansys/release-github + release: + name: Release project + if: github.event_name == 'push' && contains(github.ref, 'refs/tags') + needs: [package, doc-build-with-examples] + runs-on: ubuntu-latest + steps: + - name: Release to the public PyPI repository + uses: ansys/actions/release-pypi-public@v4 + with: + library-name: ${{ env.PACKAGE_NAME }} + twine-username: "__token__" + twine-token: ${{ secrets.PYPI_TOKEN }} + + - name: Release to GitHub + uses: ansys/actions/release-github@v4 + with: + library-name: ${{ env.PACKAGE_NAME }} + + upload-release-doc: + name: Upload release documentation + if: github.event_name == 'push' && contains(github.ref, 'refs/tags') + runs-on: ubuntu-latest + needs: [release] + steps: + - name: Deploy the stable documentation + uses: ansys/actions/doc-deploy-stable@v4 + with: + cname: ${{ env.DOCUMENTATION_CNAME }} + token: ${{ secrets.GITHUB_TOKEN }} + doc-artifact-name: 'documentation-html' + + doc-index-stable: + name: Deploy stable docs index + if: github.event_name == 'push' && contains(github.ref,
'refs/tags') + runs-on: ubuntu-latest + needs: upload-release-doc + steps: + - name: Install Git and clone project + uses: actions/checkout@v4 + + - name: Install the package requirements + run: pip install -e . + + - name: Get the version to PyMeilisearch + run: | + VERSION=$(python -c "from pyaedt import __version__; print('.'.join(__version__.split('.')[:2]))") + VERSION_MEILI=$(python -c "from pyaedt import __version__; print('-'.join(__version__.split('.')[:2]))") + echo "Calculated VERSION: $VERSION" + echo "Calculated VERSION_MEILI: $VERSION_MEILI" + echo "VERSION=$VERSION" >> $GITHUB_ENV + echo "VERSION_MEILI=$VERSION_MEILI" >> $GITHUB_ENV + + - name: Deploy the latest documentation index + uses: ansys/actions/doc-deploy-index@v4 + with: + cname: ${{ env.DOCUMENTATION_CNAME }}/version/${{ env.VERSION }} + index-name: pyaedt-v${{ env.VERSION_MEILI }} + host-url: ${{ env.MEILISEARCH_HOST_URL }} + api-key: ${{ env.MEILISEARCH_API_KEY }} + python-version: ${{ env.MAIN_PYTHON_VERSION }} diff --git a/.github/workflows/cpython_linux.yml b/.github/workflows/cpython_linux.yml deleted file mode 100644 index 5d9313e5765..00000000000 --- a/.github/workflows/cpython_linux.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: Linux_CPython_UnitTests - -env: - ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} - python.version: '3.10' - python.venv: 'testvenv' - # Following env vars when changed will "reset" the mentioned cache, - # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... - # You should go up in number, if you go down (or repeat a previous value) - # you might end up reusing a previous cache if it haven't been deleted already. - # It applies 7 days retention policy by default. 
- RESET_PIP_CACHE: 0 - PACKAGE_NAME: PyAEDT - - -on: - workflow_dispatch: - inputs: - logLevel: - description: 'Log level' - required: true - default: 'warning' - tags: - description: 'Linux CPython daily' - schedule: # UTC at 0100 - - cron: '0 1 * * *' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - test: - runs-on: [Linux, pyaedt] - strategy: - matrix: - python-version: [ '3.10' ] - steps: - - uses: actions/checkout@v3 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - architecture: 'x86' - - - name: 'Install pyaedt' - run: | - python -m venv .pyaedt_test_env - export ANSYSEM_ROOT241=/apps/AnsysEM/v241/Linux64 - export LD_LIBRARY_PATH=$ANSYSEM_ROOT241/common/mono/Linux64/lib64:$LD_LIBRARY_PATH - source .pyaedt_test_env/bin/activate - python -m pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org pip -U - python -m pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org wheel setuptools -U - python -c "import sys; print(sys.executable)" - pip install .[tests] - pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org pytest-azurepipelines - python -c "import pyaedt; print('Imported pyaedt')" - - - name: 'Unit testing' - uses: nick-fields/retry@v3 - with: - max_attempts: 3 - retry_on: error - timeout_minutes: 60 - command: | - export ANS_NODEPCHECK=1 - export ANSYSEM_ROOT241=/apps/AnsysEM/v241/Linux64 - export LD_LIBRARY_PATH=$ANSYSEM_ROOT241/common/mono/Linux64/lib64:$LD_LIBRARY_PATH - source .pyaedt_test_env/bin/activate - pytest --tx 6*popen --durations=50 --dist loadfile -v _unittest - - - name: 'Unit testing Solvers' - continue-on-error: true - uses: nick-fields/retry@v3 - with: - max_attempts: 3 - retry_on: error - timeout_minutes: 60 - command: | - export 
ANS_NODEPCHECK=1 - export ANSYSEM_ROOT241=/apps/AnsysEM/v241/Linux64 - export LD_LIBRARY_PATH=$ANSYSEM_ROOT241/common/mono/Linux64/lib64:$LD_LIBRARY_PATH - source .pyaedt_test_env/bin/activate - pytest --tx 2*popen --durations=50 --dist loadfile -v _unittest_solvers - - - name: Upload pytest test results - uses: actions/upload-artifact@v4 - with: - name: pytest-results - path: junit/test-results.xml - if: ${{ always() }} diff --git a/.github/workflows/full_documentation.yml b/.github/workflows/full_documentation.yml deleted file mode 100644 index 0a114fa3635..00000000000 --- a/.github/workflows/full_documentation.yml +++ /dev/null @@ -1,155 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: FullDocumentation - -env: - ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} - python.version: '3.10' - python.venv: 'testvenv' - DOCUMENTATION_CNAME: 'aedt.docs.pyansys.com' - MEILISEARCH_API_KEY: ${{ secrets.MEILISEARCH_API_KEY }} - MEILISEARCH_HOST_URL: https://backend.search.pyansys.com - MEILISEARCH_PUBLIC_API_KEY: ${{ secrets.MEILISEARCH_PUBLIC_API_KEY }} -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - tags: - - v* - workflow_dispatch: - inputs: - logLevel: - description: 'Log level' - required: true - default: 'warning' - tags: - description: 'Test scenario tags' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - full_documentation: - # The type of runner that the job will run on - name: full_documentation - runs-on: [Windows, self-hosted, pyaedt] - timeout-minutes: 720 - strategy: - matrix: - python-version: ['3.10'] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - - name: Set up Python ${{ matrix.python-version 
}} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: 'Create virtual env' - run: | - python -m venv testenv - testenv\Scripts\Activate.ps1 - python -m pip install pip -U - python -m pip install wheel setuptools -U - python -c "import sys; print(sys.executable)" - - - name: 'Install pyaedt' - run: | - testenv\Scripts\Activate.ps1 - pip install .[doc] - Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv\Lib\site-packages\vtkmodules" -Force - - - name: Retrieve PyAEDT version - id: version - run: | - testenv\Scripts\Activate.ps1 - echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT - echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" - - - name: Create HTML Documentations - run: | - testenv\Scripts\Activate.ps1 - sphinx-build -j auto --color -b html -a doc/source doc/_build/html - -# - name: Create PDF Documentations -# run: | -# testenv\Scripts\Activate.ps1 -# .\doc\make.bat pdf - - - name: Upload HTML documentation artifact - uses: actions/upload-artifact@v3 - with: - name: documentation-html - path: doc/_build/html - retention-days: 7 - -# - name: Upload PDF documentation artifact -# uses: actions/upload-artifact@v4 -# with: -# name: documentation-pdf -# path: doc/_build/pdf -# retention-days: 7 - -# - name: Release -# uses: softprops/action-gh-release@v1 -# if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') -# with: -# generate_release_notes: true -# files: | -# doc/_build/pdf - - doc-deploy-stable: - name: Deploy stable documentation - runs-on: ubuntu-latest - needs: full_documentation - if: github.event_name == 'push' && contains(github.ref, 'refs/tags') - steps: - - name: Deploy the stable documentation - uses: ansys/actions/doc-deploy-stable@v4 - with: - cname: ${{ env.DOCUMENTATION_CNAME }} - token: ${{ secrets.GITHUB_TOKEN }} - python-version: ${{ matrix.python-version }} - - - 
doc-index-stable: - name: "Deploy stable docs index" - if: github.event_name == 'push' && contains(github.ref, 'refs/tags') - runs-on: ubuntu-latest - needs: doc-deploy-stable - - steps: - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.MAIN_PYTHON_VERSION }} - - - uses: actions/checkout@v4 - - - uses: actions/download-artifact@v3 - - - name: Display structure of downloaded files - run: ls -R - - - name: Install the package requirements - run: pip install -e . - - - name: Get the version to PyMeilisearch - run: | - VERSION=$(python -c "from pyaedt import __version__; print('.'.join(__version__.split('.')[:2]))") - VERSION_MEILI=$(python -c "from pyaedt import __version__; print('-'.join(__version__.split('.')[:2]))") - echo "Calculated VERSION: $VERSION" - echo "Calculated VERSION_MEILI: $VERSION_MEILI" - echo "VERSION=$VERSION" >> $GITHUB_ENV - echo "VERSION_MEILI=$VERSION_MEILI" >> $GITHUB_ENV - - - name: "Deploy the stable documentation index for PyAEDT API" - uses: ansys/actions/doc-deploy-index@v4 - with: - cname: ${{ env.DOCUMENTATION_CNAME }}/version/${{ env.VERSION }} - index-name: pyaedt-v${{ env.VERSION_MEILI }} - host-url: ${{ vars.MEILISEARCH_HOST_URL }} - api-key: ${{ env.MEILISEARCH_API_KEY }} diff --git a/.github/workflows/ironpython.yml b/.github/workflows/ironpython.yml deleted file mode 100644 index 0fb334beaaa..00000000000 --- a/.github/workflows/ironpython.yml +++ /dev/null @@ -1,42 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: CI_Ironpython - -env: - ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} - -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - pull_request: - branches: [ main ] - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel 
-jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: [Windows, self-hosted, pyaedt] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - - name: 'Run Unit Tests in Ironpython' - timeout-minutes: 60 - run: | - $processA = start-process 'cmd' -ArgumentList '/c .\_unittest_ironpython\run_unittests_batchmode.cmd' -PassThru - $processA.WaitForExit() - get-content .\_unittest_ironpython\pyaedt_unit_test_ironpython.log - $test_errors_failures = Select-String -Path .\_unittest_ironpython\pyaedt_unit_test_ironpython.log -Pattern "TextTestResult errors=" - if ($test_errors_failures -ne $null) - { - exit 1 - } - else - { - exit 0 - } diff --git a/.github/workflows/nightly-docs.yml b/.github/workflows/nightly-docs.yml index a3c8aec00c0..9c9ccb30c6b 100644 --- a/.github/workflows/nightly-docs.yml +++ b/.github/workflows/nightly-docs.yml @@ -7,6 +7,7 @@ on: env: ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} + MAIN_PYTHON_VERSION: '3.10' DOCUMENTATION_CNAME: 'aedt.docs.pyansys.com' MEILISEARCH_API_KEY: ${{ secrets.MEILISEARCH_API_KEY }} MEILISEARCH_PUBLIC_API_KEY: ${{ secrets.MEILISEARCH_PUBLIC_API_KEY }} @@ -16,43 +17,50 @@ concurrency: cancel-in-progress: true jobs: - docs_build: + doc-build: + name: Documentation build without examples runs-on: ubuntu-latest - + needs: [doc-style] steps: - - uses: actions/checkout@v4 + - name: Install Git and checkout project + uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: ${{ env.MAIN_PYTHON_VERSION }} - - name: Install pyaedt + - name: Update pip run: | - pip install . 
+ pip install --upgrade pip - - name: Install doc build requirements + - name: Install pyaedt and documentation dependencies + run: | + pip install .[doc-no-examples] + + - name: Retrieve PyAEDT version + id: version run: | - pip install .[doc] + echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT + echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" - name: Install doc build requirements run: | sudo apt update sudo apt install graphviz texlive-latex-extra latexmk texlive-xetex texlive-fonts-extra -y - - name: Documentation Build (HTML) + # TODO: Update this step once pyaedt-examples is ready + - name: Build HTML documentation without examples run: | make -C doc clean - mkdir doc/source/examples -p - echo $'Examples\n========' > doc/source/examples/index.rst - make -C doc phtml-no-examples SPHINXOPTS="-j auto -w build_errors.txt -N" + make -C doc html-no-examples # Verify that sphinx generates no warnings - name: Check for warnings run: | python doc/print_errors.py - - name: Documentation Build (PDF) + - name: Build PDF documentation without examples run: | make -C doc pdf-no-examples @@ -62,84 +70,42 @@ jobs: mv documentation-html.zip ./doc/_build/html/_static/assets/download/ cp doc/_build/latex/PyAEDT-Documentation-*.pdf ./doc/_build/html/_static/assets/download/pyaedt.pdf - - name: Upload documentation HTML artifact + - name: Upload HTML documentation without examples artifact uses: actions/upload-artifact@v3 with: - name: documentation-html + name: documentation-no-examples-html path: doc/_build/html retention-days: 7 - - name: Upload documentation PDF artifact + - name: Upload PDF documentation without examples artifact uses: actions/upload-artifact@v3 with: - name: Documentation-pdf - path: doc/_build/latex/*.pdf + name: documentation-pdf + path: doc/_build/latex/PyAEDT-Documentation-*.pdf retention-days: 7 - docs_upload: - needs: docs_build + upload-dev-doc: + 
name: Upload dev documentation runs-on: ubuntu-latest + needs: [doc-build] steps: - name: Upload development documentation uses: ansys/actions/doc-deploy-dev@v4 with: cname: ${{ env.DOCUMENTATION_CNAME }} token: ${{ secrets.GITHUB_TOKEN }} + doc-artifact-name: 'documentation-no-examples-html' doc-index-dev: - name: "Deploy dev docs index" + name: Deploy dev index docs runs-on: ubuntu-latest - needs: docs_upload + needs: upload-dev-doc steps: - - uses: actions/checkout@v4 - - - uses: actions/download-artifact@v3 - - - name: Display structure of downloaded files - run: ls -R - - - name: "Deploy the dev documentation index for PyAEDT API" + - name: Deploy the latest documentation index uses: ansys/actions/doc-deploy-index@v4 with: cname: ${{ env.DOCUMENTATION_CNAME }}/version/dev index-name: pyaedt-vdev - host-url: ${{ vars.MEILISEARCH_HOST_URL }} + host-url: ${{ env.MEILISEARCH_HOST_URL }} api-key: ${{ env.MEILISEARCH_API_KEY }} - - # docstring_testing: - # runs-on: Windows - - # steps: - # - uses: actions/checkout@v4 - - # - name: Setup Python - # uses: actions/setup-python@v2 - # with: - # python-version: 3.8 - - # - name: 'Create virtual env' - # run: | - # python -m venv testenv - # testenv\Scripts\Activate.ps1 - # python -m pip install pip -U - # python -m pip install wheel setuptools -U - # python -c "import sys; print(sys.executable)" - - # - name: 'Install pyaedt' - # run: | - # testenv\Scripts\Activate.ps1 - # pip install . 
--use-feature=in-tree-build - # cd _unittest - # python -c "import pyaedt; print('Imported pyaedt')" - - # - name: Install testing requirements - # run: | - # testenv\Scripts\Activate.ps1 - # pip install -r requirements/requirements_test.txt - # pip install pytest-azurepipelines - - # - name: Docstring testing - # run: | - # testenv\Scripts\Activate.ps1 - # pytest -v pyaedt/desktop.py pyaedt/icepak.py - # pytest -v pyaedt/desktop.py pyaedt/hfss.py + python-version: ${{ env.MAIN_PYTHON_VERSION }} diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml deleted file mode 100644 index 473ec840b8c..00000000000 --- a/.github/workflows/unit_tests.yml +++ /dev/null @@ -1,173 +0,0 @@ -name: CI - -env: - ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} - python.version: '3.10' - python.venv: 'testvenv' - # Following env vars when changed will "reset" the mentioned cache, - # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... - # You should go up in number, if you go down (or repeat a previous value) - # you might end up reusing a previous cache if it hasn't been deleted already. - # It applies 7 days retention policy by default. 
- RESET_PIP_CACHE: 0 - PACKAGE_NAME: PyAEDT -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - tags: - - 'v*' - branches: - - main - pull_request: - branches: [ main ] - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build_solvers: - # The type of runner that the job will run on - runs-on: [Windows, self-hosted, pyaedt] - strategy: - matrix: - python-version: [ '3.10' ] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: 'Create virtual env' - run: | - Remove-Item D:\Temp\* -Recurse -Force -ErrorAction SilentlyContinue - python -m venv testenv_s - testenv_s\Scripts\Activate.ps1 - python -m pip install pip -U - python -m pip install wheel setuptools -U - python -c "import sys; print(sys.executable)" - - - name: 'Install pyaedt' - run: | - testenv_s\Scripts\Activate.ps1 - pip install . 
- pip install .[tests] - pip install pytest-azurepipelines - Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv_s\Lib\site-packages\vtkmodules" -Force - mkdir tmp - cd tmp - python -c "import pyaedt; print('Imported pyaedt')" - - # - name: "Check licences of packages" - # uses: pyansys/pydpf-actions/check-licenses@v2.0 - - - name: 'Unit testing' - uses: nick-fields/retry@v3 - with: - max_attempts: 1 - retry_on: error - timeout_minutes: 40 - command: | - testenv_s\Scripts\Activate.ps1 - Set-Item -Path env:PYTHONMALLOC -Value "malloc" - pytest --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest_solvers - - - uses: codecov/codecov-action@v4 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - with: - name: 'Upload coverage to Codecov' - - - name: Upload pytest test results - uses: actions/upload-artifact@v4 - with: - name: pytest-solver-results - path: junit/test-results.xml - # Use always() to always run this step to publish test results when there are test failures - if: ${{ always() }} - - - build: - # The type of runner that the job will run on - runs-on: [Windows, self-hosted, pyaedt] - strategy: - matrix: - python-version: ['3.10'] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: 'Create virtual env' - run: | - Remove-Item D:\Temp\* -Recurse -Force -ErrorAction SilentlyContinue - python -m venv testenv - testenv\Scripts\Activate.ps1 - python -m pip install pip -U - python -m pip install wheel setuptools -U - python -c "import sys; print(sys.executable)" - - - name: 'Install pyaedt' - run: | - testenv\Scripts\Activate.ps1 - pip install . 
- pip install .[tests] - pip install pytest-azurepipelines - Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv\Lib\site-packages\vtkmodules" -Force - mkdir tmp - cd tmp - python -c "import pyaedt; print('Imported pyaedt')" - - # - name: "Check licences of packages" - # uses: pyansys/pydpf-actions/check-licenses@v2.0 - - - name: 'Unit testing' - uses: nick-fields/retry@v3 - with: - max_attempts: 2 - retry_on: error - timeout_minutes: 50 - command: | - testenv\Scripts\Activate.ps1 - Set-Item -Path env:PYTHONMALLOC -Value "malloc" - pytest -n 4 --dist loadfile --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest - - - uses: codecov/codecov-action@v4 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - with: - name: 'Upload coverage to Codecov' - - - name: Upload pytest test results - uses: actions/upload-artifact@v4 - with: - name: pytest-results - path: junit/test-results.xml - # Use always() to always run this step to publish test results when there are test failures - if: ${{ always() }} - - - name: 'Build and validate source distribution' - run: | - testenv\Scripts\Activate.ps1 - python -m pip install build twine - python -m build - python -m twine check dist/* - - - name: "Builds and uploads to PyPI" - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - run: | - testenv\Scripts\Activate.ps1 - python setup.py sdist - python -m pip install twine - python -m twine upload --skip-existing dist/* - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/unit_tests_solvers.bkp b/.github/workflows/unit_tests_solvers.bkp deleted file mode 100644 index 4d0691a5dab..00000000000 --- a/.github/workflows/unit_tests_solvers.bkp +++ /dev/null @@ -1,103 +0,0 @@ -name: CI_Solvers - -env: - python.version: '3.10' - python.venv: 'testvenv' - # Following env vars when changed will "reset" the mentioned cache, - # by changing the 
cache file name. It is rendered as ...-v%RESET_XXX%-... - # You should go up in number, if you go down (or repeat a previous value) - # you might end up reusing a previous cache if it hasn't been deleted already. - # It applies 7 days retention policy by default. - RESET_PIP_CACHE: 0 - PACKAGE_NAME: PyAEDT -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - tags: - - 'v*' - branches: - - main - pull_request: - branches: [ main ] - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: [Windows, self-hosted, pyaedt] - strategy: - matrix: - python-version: ['3.10'] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: 'Create virtual env' - run: | - Remove-Item D:\Temp\* -Recurse -Force - python -m venv testenv_s - testenv_s\Scripts\Activate.ps1 - python -m pip install pip -U - python -m pip install wheel setuptools -U - python -c "import sys; print(sys.executable)" - - - name: 'Install pyaedt' - run: | - testenv_s\Scripts\Activate.ps1 - pip install . 
- pip install .[tests] - pip install pytest-azurepipelines - Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv_s\Lib\site-packages\vtkmodules" -Force - mkdir tmp - cd tmp - python -c "import pyaedt; print('Imported pyaedt')" - - # - name: "Check licences of packages" - # uses: pyansys/pydpf-actions/check-licenses@v2.0 - - - name: 'Unit testing' - timeout-minutes: 40 - run: | - testenv_s\Scripts\Activate.ps1 - Set-Item -Path env:PYTHONMALLOC -Value "malloc" - pytest --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest_solvers - - - uses: codecov/codecov-action@v3 - if: matrix.python-version == '3.10' - name: 'Upload coverage to Codecov' - - - name: Upload pytest test results - uses: actions/upload-artifact@v3 - with: - name: pytest-results - path: junit/test-results.xml - # Use always() to always run this step to publish test results when there are test failures - if: ${{ always() }} - - - name: 'Build and validate source distribution' - run: | - testenv_s\Scripts\Activate.ps1 - python -m pip install build twine - python -m build - python -m twine check dist/* - - - name: "Builds and uploads to PyPI" - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - run: | - testenv_s\Scripts\Activate.ps1 - python setup.py sdist - python -m pip install twine - python -m twine upload --skip-existing dist/* - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/wheelhouse.yml b/.github/workflows/wheelhouse.yml deleted file mode 100644 index 600b1c73e9c..00000000000 --- a/.github/workflows/wheelhouse.yml +++ /dev/null @@ -1,90 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: WheelHouse - -env: - python.venv: 'testvenv' - # Following env vars when changed will "reset" the mentioned cache, - # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... 
- # You should go up in number, if you go down (or repeat a previous value) - # you might end up reusing a previous cache if it haven't been deleted already. - # It applies 7 days retention policy by default. - RESET_PIP_CACHE: 0 - PACKAGE_NAME: PyAEDT -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - tags: - - 'v*' - - v* - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: [windows-latest] - strategy: - matrix: - python-version: [ 3.7, 3.8, 3.9, '3.10'] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: 'Create virtual env' - run: | - python -m venv testenv - testenv\Scripts\Activate.ps1 - python -m pip install pip -U - python -m pip install wheel setuptools -U - python -c "import sys; print(sys.executable)" - pip install .[all,dotnet] - pip install jupyterlab - - - - name: Retrieve PyAEDT version - run: | - testenv\Scripts\Activate.ps1 - echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT - echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" - id: version - - - name: Generate wheelhouse - run: | - testenv\Scripts\Activate.ps1 - $packages=$(pip freeze) - # Iterate over the packages and generate wheels - foreach ($package in $packages) { - echo "Generating wheel for $package" - pip wheel "$package" -w wheelhouse - } - - - name: Zip wheelhouse - uses: vimtor/action-zip@v1 - with: - files: wheelhouse - dest: ${{ 
env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-${{ runner.os }}-${{ matrix.python-version }}.zip - - - name: Upload Wheelhouse - uses: actions/upload-artifact@v4 - with: - name: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-${{ runner.os }}-${{ matrix.python-version }} - path: '*.zip' - retention-days: 7 - - - name: Release - uses: softprops/action-gh-release@v2 - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - with: - generate_release_notes: true - files: | - ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-${{ runner.os }}-${{ matrix.python-version }}.zip diff --git a/.github/workflows/wheelhouse_linux.yml b/.github/workflows/wheelhouse_linux.yml deleted file mode 100644 index 67458e53f95..00000000000 --- a/.github/workflows/wheelhouse_linux.yml +++ /dev/null @@ -1,89 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: WheelHouse Linux - -env: - python.venv: 'testvenv' - # Following env vars when changed will "reset" the mentioned cache - # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... - # You should go up in number. If you go down (or repeat a previous value), - # you might end up reusing a previous cache if it hasn't been deleted already. - # It applies a 7-day retention policy by default. 
- RESET_PIP_CACHE: 0 - PACKAGE_NAME: PyAEDT -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - tags: - - 'v*' - - v* - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: ubuntu-20.04 - strategy: - matrix: - python-version: [ 3.7, 3.8, 3.9, '3.10'] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: Install pyaedt - run: | - pip install .[all,dotnet] - pip install jupyterlab - - - name: Verify pyaedt can be imported - run: python -c "import pyaedt" - - - name: Retrieve PyAEDT version - run: | - echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT - echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" - id: version - - - name: Generate wheelhouse - run: | - pip install wheel setuptools -U - pip install --upgrade pip - pip wheel . 
-w wheelhouse - export wheellist=$(pip freeze) - for file in $wheellist; do - if [[ $file != *"@"* ]] && [[ $file != *"pyaedt"* ]]; then - pip wheel $file -w wheelhouse - fi - done - continue-on-error: true - - - name: Zip wheelhouse - uses: vimtor/action-zip@v1 - with: - files: wheelhouse - dest: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-wheelhouse-${{ runner.os }}-${{ matrix.python-version }}.zip - - - name: Upload Wheelhouse - uses: actions/upload-artifact@v4 - with: - name: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-wheelhouse-${{ runner.os }}-${{ matrix.python-version }} - path: '*.zip' - retention-days: 7 - - - name: Release - uses: softprops/action-gh-release@v2 - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - with: - generate_release_notes: true - files: | - ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-wheelhouse-${{ runner.os }}-${{ matrix.python-version }}.zip \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 74280703f34..9e0d207e297 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -39,7 +39,8 @@ Here's a simple overview of how you can start making contributions: **Push Your Changes:** After committing your changes, "push" them to your forked repository on GitHub with `git push origin `. -**Create a Pull Request:** A Pull Request (PR) lets us know you have changes you think should be included in the main project. Go to your forked repository on GitHub and click on the 'Pull request' button. +**Create a Pull Request:** A Pull Request (PR) lets us know you have changes you think should be included in the main project. Go to your forked repository on GitHub and click on the 'Pull request' button. The title of your +'Pull request' must follow the `conventional commits standard `_ where the type field is expected to be defined with upper cases. 
Following these steps ensures that your contributions will be easily reviewed and potentially included in the project much faster. diff --git a/doc/Makefile b/doc/Makefile index 1336617c68a..6b135dfaa78 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -3,10 +3,12 @@ # You can set these variables from the command line, and also # from the environment for the first two. -SPHINXOPTS ?= -j auto --color +SPHINXOPTS ?= -j auto --color -w build_errors.txt SPHINXBUILD ?= sphinx-build SOURCEDIR = source BUILDDIR = _build +LINKCHECKDIR = $(BUILDDIR)/linkcheck +LINKCHECKOPTS = -d $(BUILDDIR)/.doctrees -W --keep-going --color # Put it first so that "make" without argument is like "make help". help: @@ -14,41 +16,67 @@ help: .PHONY: help Makefile -clean: - rm -rf $(BUILDDIR)/* - rm -rf examples/ - find . -type d -name "_autosummary" -exec rm -rf {} + +.install-deps: + @pip freeze | grep -q "vtk-osmesa" && is_vtk_osmesa_installed="yes" || is_vtk_osmesa_installed="no" + @if [ "${ON_CI}" = "True" ] && [ "$$is_vtk_osmesa_installed" != "yes" ]; then \ + @echo "Removing package(s) to avoid conflicts with package(s) needed for CI/CD"; \ + pip uninstall --yes vtk; \ + @echo "Installing CI/CD required package(s)"; \ + pip install --extra-index-url https://wheels.vtk.org vtk-osmesa==9.2.20230527.dev0; \ + fi # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile +%: .install-deps Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -# build html docs in parallel using all available CPUs -# WARNING: this is a resource hog -phtml: - $(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -j auto +clean: .install-deps + @echo "Cleaning everything." + rm -rf $(BUILDDIR)/* + rm -rf examples/ + find . 
-type d -name "_autosummary" -exec rm -rf {} + + +# FIXME: currently linkcheck freezes and further investigation must be performed +# linkcheck: +# @echo "Checking links." +# @$(SPHINXBUILD) -M linkcheck "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(LINKCHECKOPTS) -vv +# @echo +# @echo "Check finished. Report is in $(LINKCHECKDIR)." -phtml-no-examples: - export PYAEDT_SKIP_EXAMPLE="1" - $(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -j auto +html-no-examples: .install-deps + @echo "Building HTML pages without examples." + export PYAEDT_DOC_RUN_EXAMPLES="0" + export PYAEDT_DOC_USE_GIF="1" + @# FIXME: currently linkcheck freezes and further investigation must be performed + @# @$(SPHINXBUILD) -M linkcheck "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(LINKCHECKOPTS) $(O) + @$(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)." -# Build pdf docs. -pdf: +html: .install-deps + @echo "Building HTML pages with examples." + export PYAEDT_DOC_RUN_EXAMPLES="1" + export PYAEDT_DOC_USE_GIF="1" + @# FIXME: currently linkcheck freezes and further investigation must be performed + @# @$(SPHINXBUILD) -M linkcheck "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(LINKCHECKOPTS) $(O) + @$(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)." + +pdf-no-examples: .install-deps + @echo "Building PDF pages without examples." + export PYAEDT_DOC_RUN_EXAMPLES="0" + export PYAEDT_DOC_USE_GIF="0" @$(SPHINXBUILD) -M latex "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) cd $(BUILDDIR)/latex && latexmk -r latexmkrc -pdf *.tex -interaction=nonstopmode || true (test -f $(BUILDDIR)/latex/PyAEDT-Documentation-*.pdf && echo pdf exists) || exit 1 + @echo "Build finished. The PDF pages are in $(BUILDDIR)." 
-pdf-no-examples: - export PYAEDT_SKIP_EXAMPLE="1" +pdf: .install-deps + @echo "Building PDF pages with examples." + export PYAEDT_DOC_RUN_EXAMPLES="1" + export PYAEDT_DOC_USE_GIF="0" @$(SPHINXBUILD) -M latex "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) cd $(BUILDDIR)/latex && latexmk -r latexmkrc -pdf *.tex -interaction=nonstopmode || true (test -f $(BUILDDIR)/latex/PyAEDT-Documentation-*.pdf && echo pdf exists) || exit 1 - -# build docs like the CI build -cibuild: - mkdir source/examples -p - echo 'Examples' > source/examples/index.rst - echo '========' >> source/examples/index.rst - $(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -j auto -w build_errors.txt -N - python print_errors.py + @echo "Build finished. The PDF pages are in $(BUILDDIR)." diff --git a/doc/make.bat b/doc/make.bat index 226691ae917..bc9100f4b20 100644 --- a/doc/make.bat +++ b/doc/make.bat @@ -4,14 +4,32 @@ pushd %~dp0 REM Command file for Sphinx documentation +if "%SPHINXOPTS%" == "" ( + set SPHINXOPTS=-j auto --color -w build_errors.txt +) if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=source set BUILDDIR=_build +set LINKCHECKDIR=\%BUILDDIR%\linkcheck +set LINKCHECKOPTS=-d %BUILDDIR%\.doctrees -W --keep-going --color + +REM This LOCs are used to uninstall and install specific package(s) during CI/CD +for /f %%i in ('pip freeze ^| findstr /c:"vtk-osmesa"') do set is_vtk_osmesa_installed=%%i +if NOT "%is_vtk_osmesa_installed%" == "vtk-osmesa" if "%ON_CI%" == "True" ( + echo "Removing package(s) to avoid conflicts with package(s) needed for CI/CD" + pip uninstall --yes vtk + echo "Installing CI/CD required package(s)" + pip install --extra-index-url https://wheels.vtk.org vtk-osmesa==9.2.20230527.dev0) +REM End of CICD dedicated setup if "%1" == "" goto help +if "%1" == "clean" goto clean +if "%1" == "html" goto html +if "%1" == "html-no-examples" goto html-no-examples if "%1" == "pdf" goto pdf +if "%1" == "pdf-no-examples" goto pdf-no-examples 
%SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( @@ -31,15 +49,66 @@ goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end -:pdf -set PYAEDT_SKIP_EXAMPLE=1 +:clean +echo Cleaning everything +rmdir /s /q %SOURCEDIR%\examples > /NUL 2>&1 +rmdir /s /q %BUILDDIR% > /NUL 2>&1 +for /d /r %SOURCEDIR% %%d in (_autosummary) do @if exist "%%d" rmdir /s /q "%%d" +goto end + +:html +echo Building HTML pages with examples +set PYAEDT_DOC_RUN_EXAMPLES=1 +set PYAEDT_DOC_USE_GIF=1 +::FIXME: currently linkcheck freezes and further investigation must be performed +::%SPHINXBUILD% -M linkcheck %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %LINKCHECKOPTS% %O% +%SPHINXBUILD% -M html %SOURCEDIR% %BUILDDIR% +echo +echo "Build finished. The HTML pages are in %BUILDDIR%." +goto end + +:html-no-examples +echo Building HTML pages without examples +set PYAEDT_DOC_RUN_EXAMPLES=0 +set PYAEDT_DOC_USE_GIF=1 +if not exist "source\examples" mkdir "source\examples" +echo Examples> source\examples\index.rst +echo ========> source\examples\index.rst +::FIXME: currently linkcheck freezes and further investigation must be performed +::%SPHINXBUILD% -M linkcheck %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %LINKCHECKOPTS% %O% +%SPHINXBUILD% -M html %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +echo +echo "Build finished. The HTML pages are in %BUILDDIR%." +goto end +:pdf +echo Building PDF pages with examples +set PYAEDT_DOC_RUN_EXAMPLES=1 +set PYAEDT_DOC_USE_GIF=0 +%SPHINXBUILD% -M latex %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +cd "%BUILDDIR%\latex" +for %%f in (*.tex) do ( +xelatex "%%f" --interaction=nonstopmode) +echo "Build finished. The PDF pages are in %BUILDDIR%." 
+goto end +:pdf-no-examples +echo Building PDF pages without examples +set PYAEDT_DOC_RUN_EXAMPLES=0 +set PYAEDT_DOC_USE_GIF=0 +if not exist "source\examples" mkdir "source\examples" +echo Examples> source\examples\index.rst +echo ========> source\examples\index.rst +::FIXME: currently linkcheck freezes and further investigation must be performed +::%SPHINXBUILD% -M linkcheck %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %LINKCHECKOPTS% %O% %SPHINXBUILD% -M latex %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% cd "%BUILDDIR%\latex" for %%f in (*.tex) do ( xelatex "%%f" --interaction=nonstopmode) +echo "Build finished. The PDF pages are in %BUILDDIR%." +goto end :end popd diff --git a/doc/source/Getting_started/About.rst b/doc/source/Getting_started/About.rst index ceff4174ca2..5f42b245b34 100644 --- a/doc/source/Getting_started/About.rst +++ b/doc/source/Getting_started/About.rst @@ -26,7 +26,7 @@ and mechanical solvers for comprehensive multiphysics analysis. Tight integration among these solutions provides unprecedented ease of use for setup and faster resolution of complex simulations for design and optimization. -.. image:: https://images.ansys.com/is/image/ansys/ansys-electronics-technology-collage?wid=941&op_usm=0.9,1.0,20,0&fit=constrain,0 +.. image:: ../Resources/aedt_collage.jpg :width: 800 :alt: AEDT Applications :target: https://www.ansys.com/products/electronics diff --git a/doc/source/Resources/aedt_collage.jpg b/doc/source/Resources/aedt_collage.jpg new file mode 100644 index 00000000000..9028deeb9fb Binary files /dev/null and b/doc/source/Resources/aedt_collage.jpg differ diff --git a/doc/source/User_guide/pyaedt_file_data/project.rst b/doc/source/User_guide/pyaedt_file_data/project.rst index d9b965f0de1..f9800aade81 100644 --- a/doc/source/User_guide/pyaedt_file_data/project.rst +++ b/doc/source/User_guide/pyaedt_file_data/project.rst @@ -33,7 +33,7 @@ File structure examples: :download:`HFSS 3D Layout Example <../../Resources/hfss3dlayout_project_example.json>` -.. 
code-block:: json +.. code-block:: { "general": { @@ -145,11 +145,10 @@ File structure examples: ], "monitors": [ # Monitor Name : {Monitor Properties} - ], + ], "native components": { # Component Name : {Component Properties} - - } + } } For a practical demonstration, see the diff --git a/doc/source/conf.py b/doc/source/conf.py index 4629c5687cc..712bb1f6c96 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -7,9 +7,7 @@ import sys import warnings -import pyvista import numpy as np -import json from sphinx_gallery.sorting import FileNameSortKey from ansys_sphinx_theme import (ansys_favicon, get_version_match, pyansys_logo_black, @@ -21,6 +19,7 @@ from docutils.parsers.rst import Directive from docutils import nodes from sphinx import addnodes +from sphinx.util import logging import shutil # <-----------------Override the sphinx pdf builder----------------> @@ -42,8 +41,14 @@ def visit_desc_content(self, node: Element) -> None: # <----------------- End of sphinx pdf builder override----------------> + +logger = logging.getLogger(__name__) + + +# Sphinx event hooks + class PrettyPrintDirective(Directive): - """Renders a constant using ``pprint.pformat`` and inserts into the document.""" + """Renders a constant using ``pprint.pformat`` and inserts it into the document.""" required_arguments = 1 def run(self): @@ -71,11 +76,29 @@ def autodoc_skip_member(app, what, name, obj, skip, options): # return True if exclude else None -def remove_doctree(app, exception): - """Remove the .doctree directory created during the documentation build. 
- """ - shutil.rmtree(app.doctreedir) +def directory_size(directory_path): + """Compute the size (in megabytes) of a directory.""" + res = 0 + for path, _, files in os.walk(directory_path): + for f in files: + fp = os.path.join(path, f) + res += os.stat(fp).st_size + # Convert in megabytes + res /= 1e6 + return res +def remove_doctree(app, exception): + """Remove the ``.doctree`` directory created during the documentation build.""" + + # Keep the ``doctree`` directory to avoid creating it twice. This is typically helpful in CI/CD + # where we want to build both HTML and PDF pages. + if bool(int(os.getenv("SPHINXBUILD_KEEP_DOCTREEDIR", "0"))): + logger.info(f"Keeping directory {app.doctreedir}.") + else: + size = directory_size(app.doctreedir) + logger.info(f"Removing doctree {app.doctreedir} ({size} MB).") + shutil.rmtree(app.doctreedir, ignore_errors=True) + logger.info(f"Doctree removed.") def setup(app): app.add_directive('pprint', PrettyPrintDirective) @@ -101,20 +124,15 @@ def setup(app): author = "Ansys Inc." cname = os.getenv("DOCUMENTATION_CNAME", "nocname.com") switcher_version = get_version_match(__version__) - -# Check for the local config file, otherwise use default desktop configuration -local_config_file = os.path.join(local_path, "local_config.json") -if os.path.exists(local_config_file): - with open(local_config_file) as f: - config = json.load(f) -else: - config = {"run_examples": True} - release = version = __version__ os.environ["PYAEDT_NON_GRAPHICAL"] = "1" os.environ["PYAEDT_DOC_GENERATION"] = "1" +# Do not run examples by default +run_examples = bool(int(os.getenv("PYAEDT_DOC_RUN_EXAMPLES", "0"))) +use_gif = bool(int(os.getenv("PYAEDT_DOC_USE_GIF", "1"))) + # -- General configuration --------------------------------------------------- # Add any Sphinx_PyAEDT extension module names here as strings. They can be @@ -214,6 +232,7 @@ def setup(app): # This pattern also affects html_static_path and html_extra_path. 
exclude_patterns = ["_build", "sphinx_boogergreen_theme_1", "Thumbs.db", ".DS_Store", "*.txt"] + inheritance_graph_attrs = dict(rankdir="RL", size='"8.0, 10.0"', fontsize=14, ratio="compress") inheritance_node_attrs = dict(shape="ellipse", fontsize=14, height=0.75, color="dodgerblue1", style="filled") @@ -237,63 +256,59 @@ def setup(app): pygments_style = "sphinx" -# Manage errors -pyvista.set_error_output_file("errors.txt") - -# Ensure that offscreen rendering is used for docs generation -pyvista.OFF_SCREEN = True - -# Preferred plotting style for documentation -# pyvista.set_plot_theme('document') - -# must be less than or equal to the XVFB window size -pyvista.global_theme["window_size"] = np.array([1024, 768]) - -# Save figures in specified directory -pyvista.FIGURE_PATH = os.path.join(os.path.abspath("./images/"), "auto-generated/") -if not os.path.exists(pyvista.FIGURE_PATH): - os.makedirs(pyvista.FIGURE_PATH) - # gallery build requires AEDT install -if is_windows and "PYAEDT_CI_NO_EXAMPLES" not in os.environ: +# if is_windows and bool(os.getenv("PYAEDT_CI_RUN_EXAMPLES", "0")): +if run_examples: + import pyvista + + # PyVista settings + + # Ensure that offscreen rendering is used for docs generation + pyvista.OFF_SCREEN = True + # Save figures in specified directory + pyvista.FIGURE_PATH = os.path.join(os.path.abspath("./images/"), "auto-generated/") + if not os.path.exists(pyvista.FIGURE_PATH): + os.makedirs(pyvista.FIGURE_PATH) + # Necessary for pyvista when building the sphinx gallery + pyvista.BUILDING_GALLERY = True + + # Manage errors + pyvista.set_error_output_file("errors.txt") + # Must be less than or equal to the XVFB window size + pyvista.global_theme["window_size"] = np.array([1024, 768]) # suppress annoying matplotlib bug warnings.filterwarnings( "ignore", category=UserWarning, - message="Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.", + message="Matplotlib is currently using agg, which is a non-GUI 
backend, so it cannot show the figure.", ) - # necessary for pyvista when building the sphinx gallery - pyvista.BUILDING_GALLERY = True - - if config["run_examples"] and not os.environ.get("PYAEDT_SKIP_EXAMPLE", False): - extensions.append("sphinx_gallery.gen_gallery") - - sphinx_gallery_conf = { - # convert rst to md for ipynb - "pypandoc": True, - # path to your examples scripts - "examples_dirs": ["../../examples/"], - # path where to save gallery generated examples - "gallery_dirs": ["examples"], - # Pattern to search for examples files - "filename_pattern": r"\.py", - # Remove the "Download all examples" button from the top level gallery - "download_all_examples": False, - # Sort gallery examples by file name instead of number of lines (default) - "within_subsection_order": FileNameSortKey, - # directory where function granular galleries are stored - "backreferences_dir": None, - # Modules for which function level galleries are created. In - "doc_module": "ansys-pyaedt", - "image_scrapers": ("pyvista", "matplotlib"), - "ignore_pattern": "flycheck*", - "thumbnail_size": (350, 350), - # 'first_notebook_cell': ("%matplotlib inline\n" - # "from pyvista import set_plot_theme\n" - # "set_plot_theme('document')"), - } + extensions.append("sphinx_gallery.gen_gallery") + sphinx_gallery_conf = { + # convert rst to md for ipynb + "pypandoc": True, + # path to your examples scripts + "examples_dirs": ["../../examples/"], + # path where to save gallery generated examples + "gallery_dirs": ["examples"], + # Pattern to search for examples files + "filename_pattern": r"\.py", + # Remove the "Download all examples" button from the top level gallery + "download_all_examples": False, + # Sort gallery examples by file name instead of number of lines (default) + "within_subsection_order": FileNameSortKey, + # Directory where function granular galleries are stored + "backreferences_dir": None, + # Modules for which function level galleries are created. 
In + "doc_module": "ansys-pyaedt", + "image_scrapers": ("pyvista", "matplotlib"), + "ignore_pattern": r"flycheck.*", + "thumbnail_size": (350, 350), + } + if not use_gif: + gif_ignore_pattern = r"|.*Maxwell2D_Transient\.py|.*Maxwell2D_DCConduction\.py|.*Hfss_Icepak_Coupling\.py|.*SBR_Time_Plot\.py" + sphinx_gallery_conf["ignore_pattern"] = sphinx_gallery_conf["ignore_pattern"] + gif_ignore_pattern # -- Options for HTML output ------------------------------------------------- html_short_title = html_title = "PyAEDT" @@ -334,7 +349,6 @@ def setup(app): "api_key": os.getenv("MEILISEARCH_PUBLIC_API_KEY", ""), "index_uids": { f"pyaedt-v{get_version_match(__version__).replace('.', '-')}": "PyAEDT", - f"pyedb-v{get_version_match(__version__).replace('.', '-')}": "EDB API", }, }, } diff --git a/doc/source/index.rst b/doc/source/index.rst index e2c60197713..b805fb58c85 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -54,6 +54,7 @@ enabling straightforward and efficient automation in your workflow. .. toctree:: :hidden: + Getting_started/index User_guide/index API/index diff --git a/pyproject.toml b/pyproject.toml index 6c00c3a5493..e04ca76f97a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -100,12 +100,12 @@ doc = [ "utm", "vtk==9.2.6", ] -doc-noexamples = [ +doc-no-examples = [ "ansys-sphinx-theme>=0.10.0,<0.16", "imageio>=2.30.0,<2.35", #"imageio-ffmpeg", "numpydoc>=1.5.0,<1.8", - # "recommonmark", + "recommonmark", "Sphinx==5.3.0; python_version == '3.7'", "Sphinx>=7.1.0,<7.4; python_version > '3.7'", "sphinx-autobuild==2021.3.14; python_version == '3.7'",