From 1fe09edd892d53897a9d860c52eade144d15c775 Mon Sep 17 00:00:00 2001 From: Michael Carroll Date: Wed, 8 Feb 2023 17:14:13 -0600 Subject: [PATCH 1/7] Remove existing implementation Signed-off-by: Michael Carroll --- .github/actions/bazel-ci-bionic/Dockerfile | 6 - .github/actions/bazel-ci-bionic/action.yml | 16 - .github/actions/bazel-ci-bionic/entrypoint.sh | 3 - .github/actions/bazel-ci-bionic/run.sh | 67 ----- .github/actions/bazel-ci-focal/Dockerfile | 6 - .github/actions/bazel-ci-focal/action.yml | 16 - .github/actions/bazel-ci-focal/entrypoint.sh | 3 - .github/actions/bazel-ci-focal/run.sh | 70 ----- .github/ci/bazel.repos | 10 - .github/ci/packages-focal.apt | 2 - .github/workflows/ci.yml | 35 --- BUILD.bazel | 40 --- LICENSE | 201 ------------- README.md | 75 ----- build_defs.bzl | 59 ---- cmake_configure_file.bzl | 86 ------ cmake_configure_file.py | 155 ---------- embed_sdf.py | 28 -- example/0001-Add-ign-utils.patch | 279 ------------------ example/BUILD.example | 14 - example/Dockerfile | 96 ------ example/README.md | 13 - example/WORKSPACE.example | 36 --- example/bazel.repos | 73 ----- example/bazelrc.example | 6 - example/build_workspace.sh | 18 -- example/setup_workspace.sh | 14 - generate_file.bzl | 28 -- generate_include_header.bzl | 41 --- generate_yaml.bzl | 24 -- ign_config_header.bzl | 47 --- ign_export_header.bzl | 69 ----- ignition/utilities/ExtraTestMacros.hh | 47 --- ignition/utilities/SuppressWarning.hh | 71 ----- ignition/utilities/detail/ExtraTestMacros.hh | 62 ---- ignition/utilities/detail/SuppressWarning.hh | 156 ---------- pathutils.bzl | 184 ------------ qt.bzl | 102 ------- repositories.bzl | 261 ---------------- third_party/BUILD.bazel | 0 third_party/X.BUILD | 67 ----- third_party/X.build | 66 ----- third_party/assimp.BUILD | 10 - third_party/boost.BUILD | 34 --- third_party/curl.BUILD | 9 - third_party/eigen3.BUILD | 56 ---- third_party/fcl.BUILD | 10 - third_party/ffmpeg.BUILD | 179 ----------- third_party/freeimage.BUILD | 
10 - third_party/freetype2.BUILD | 8 - third_party/gl.BUILD | 17 -- third_party/glib.BUILD | 17 -- third_party/gts.BUILD | 15 - third_party/json.BUILD | 11 - third_party/nlopt.BUILD | 11 - third_party/ode.BUILD | 11 - third_party/osg.BUILD | 31 -- third_party/qt.BUILD | 86 ------ third_party/sqlite3.BUILD | 12 - third_party/tinyxml2.BUILD | 9 - third_party/uuid.BUILD | 12 - third_party/yaml.BUILD | 9 - third_party/zip.BUILD | 9 - third_party/zmq.BUILD | 12 - 64 files changed, 3230 deletions(-) delete mode 100644 .github/actions/bazel-ci-bionic/Dockerfile delete mode 100644 .github/actions/bazel-ci-bionic/action.yml delete mode 100755 .github/actions/bazel-ci-bionic/entrypoint.sh delete mode 100755 .github/actions/bazel-ci-bionic/run.sh delete mode 100644 .github/actions/bazel-ci-focal/Dockerfile delete mode 100644 .github/actions/bazel-ci-focal/action.yml delete mode 100755 .github/actions/bazel-ci-focal/entrypoint.sh delete mode 100755 .github/actions/bazel-ci-focal/run.sh delete mode 100644 .github/ci/bazel.repos delete mode 100644 .github/ci/packages-focal.apt delete mode 100644 .github/workflows/ci.yml delete mode 100644 BUILD.bazel delete mode 100644 LICENSE delete mode 100644 README.md delete mode 100644 build_defs.bzl delete mode 100644 cmake_configure_file.bzl delete mode 100644 cmake_configure_file.py delete mode 100644 embed_sdf.py delete mode 100644 example/0001-Add-ign-utils.patch delete mode 100644 example/BUILD.example delete mode 100644 example/Dockerfile delete mode 100644 example/README.md delete mode 100644 example/WORKSPACE.example delete mode 100644 example/bazel.repos delete mode 100644 example/bazelrc.example delete mode 100755 example/build_workspace.sh delete mode 100755 example/setup_workspace.sh delete mode 100644 generate_file.bzl delete mode 100644 generate_include_header.bzl delete mode 100644 generate_yaml.bzl delete mode 100644 ign_config_header.bzl delete mode 100644 ign_export_header.bzl delete mode 100644 
ignition/utilities/ExtraTestMacros.hh delete mode 100644 ignition/utilities/SuppressWarning.hh delete mode 100644 ignition/utilities/detail/ExtraTestMacros.hh delete mode 100644 ignition/utilities/detail/SuppressWarning.hh delete mode 100644 pathutils.bzl delete mode 100644 qt.bzl delete mode 100644 repositories.bzl delete mode 100644 third_party/BUILD.bazel delete mode 100644 third_party/X.BUILD delete mode 100644 third_party/X.build delete mode 100644 third_party/assimp.BUILD delete mode 100644 third_party/boost.BUILD delete mode 100644 third_party/curl.BUILD delete mode 100644 third_party/eigen3.BUILD delete mode 100644 third_party/fcl.BUILD delete mode 100644 third_party/ffmpeg.BUILD delete mode 100644 third_party/freeimage.BUILD delete mode 100644 third_party/freetype2.BUILD delete mode 100644 third_party/gl.BUILD delete mode 100644 third_party/glib.BUILD delete mode 100644 third_party/gts.BUILD delete mode 100644 third_party/json.BUILD delete mode 100644 third_party/nlopt.BUILD delete mode 100644 third_party/ode.BUILD delete mode 100644 third_party/osg.BUILD delete mode 100644 third_party/qt.BUILD delete mode 100644 third_party/sqlite3.BUILD delete mode 100644 third_party/tinyxml2.BUILD delete mode 100644 third_party/uuid.BUILD delete mode 100644 third_party/yaml.BUILD delete mode 100644 third_party/zip.BUILD delete mode 100644 third_party/zmq.BUILD diff --git a/.github/actions/bazel-ci-bionic/Dockerfile b/.github/actions/bazel-ci-bionic/Dockerfile deleted file mode 100644 index 09bd9b1..0000000 --- a/.github/actions/bazel-ci-bionic/Dockerfile +++ /dev/null @@ -1,6 +0,0 @@ -FROM ghcr.io/ignition-tooling/ign-ubuntu:citadel-bionic - -COPY ["run.sh", "/run.sh"] -COPY ["entrypoint.sh", "/entrypoint.sh"] - -ENTRYPOINT ["/entrypoint.sh"] diff --git a/.github/actions/bazel-ci-bionic/action.yml b/.github/actions/bazel-ci-bionic/action.yml deleted file mode 100644 index 3a6820a..0000000 --- a/.github/actions/bazel-ci-bionic/action.yml +++ /dev/null @@ -1,16 +0,0 @@ 
-name: 'Ignition install using bazel under bionic' -description: '' -author: "Michael Carroll" -inputs: - github_token: - description: "Token for the repo. Can be passed in using {{ secrets.GITHUB_TOKEN }}" - required: true - bazel-args: - description: 'Additional Bazel arguments to use when building package under test' - required: true - default: '//...' -runs: - using: 'docker' - image: 'Dockerfile' - args: - - ${{ inputs.bazel-args }} diff --git a/.github/actions/bazel-ci-bionic/entrypoint.sh b/.github/actions/bazel-ci-bionic/entrypoint.sh deleted file mode 100755 index d43cecd..0000000 --- a/.github/actions/bazel-ci-bionic/entrypoint.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh -l - -sudo bash /run.sh $@ diff --git a/.github/actions/bazel-ci-bionic/run.sh b/.github/actions/bazel-ci-bionic/run.sh deleted file mode 100755 index 2f6cd39..0000000 --- a/.github/actions/bazel-ci-bionic/run.sh +++ /dev/null @@ -1,67 +0,0 @@ -#!/bin/sh -l - -set -x -set -e - -BAZEL_ARGS=$1 - -echo ::group::Install tools: apt -apt update 2>&1 -apt -y install \ - build-essential \ - cppcheck \ - curl \ - git \ - gnupg \ - lsb-release \ - python3-pip \ - wget - -cd "$GITHUB_WORKSPACE" -echo ::endgroup:: - -echo ::group::Install tools: pip -pip3 install -U pip vcstool colcon-common-extensions -echo ::endgroup:: - -echo ::group::Install tools: bazel -curl https://bazel.build/bazel-release.pub.gpg | apt-key add - -echo "deb [arch=amd64] https://storage.googleapis.com/bazel-apt stable jdk1.8" | tee /etc/apt/sources.list.d/bazel.list - -apt update 2>&1 -apt -y install bazel -echo ::endgroup:: - -mkdir -p /ignition/bazel -cd /ignition/bazel -vcs import . < /github/workspace/.github/ci/bazel.repos - -cp -R /github/workspace ./ign_bazel - -echo ::group::Install dependencies from binaries -EXCLUDE_APT="libignition|libsdformat|libogre|dart" -UBUNTU_VERSION=`lsb_release -cs` -ALL_PACKAGES=$( \ - sort -u $(find . 
-iname 'packages-'$UBUNTU_VERSION'.apt' -o -iname 'packages.apt') | grep -Ev $EXCLUDE_APT | tr '\n' ' ') -apt-get install --no-install-recommends --quiet --yes $ALL_PACKAGES -echo ::endgroup:: - -ln -sf ./ign_bazel/example/WORKSPACE.example ./WORKSPACE -ln -sf ./ign_bazel/example/BUILD.example ./BUILD.bazel -ln -sf ./ign_bazel/example/bazelrc.example ./.bazelrc - -echo ::group::Bazel sync -bazel sync || { - status=$? - echo "Error during sync" -} -echo ::endgroup:: - -echo ::group::Bazel build -bazel build $BAZEL_ARGS -echo ::endgroup:: - -echo ::group::Bazel test -bazel test $BAZEL_ARGS -echo ::endgroup:: - diff --git a/.github/actions/bazel-ci-focal/Dockerfile b/.github/actions/bazel-ci-focal/Dockerfile deleted file mode 100644 index 906d833..0000000 --- a/.github/actions/bazel-ci-focal/Dockerfile +++ /dev/null @@ -1,6 +0,0 @@ -FROM ghcr.io/ignition-tooling/ign-ubuntu:dome-focal - -COPY ["run.sh", "/run.sh"] -COPY ["entrypoint.sh", "/entrypoint.sh"] - -ENTRYPOINT ["/entrypoint.sh"] diff --git a/.github/actions/bazel-ci-focal/action.yml b/.github/actions/bazel-ci-focal/action.yml deleted file mode 100644 index fa79251..0000000 --- a/.github/actions/bazel-ci-focal/action.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: 'Ignition install using bazel' -description: '' -author: "Michael Carroll" -inputs: - github_token: - description: "Token for the repo. Can be passed in using {{ secrets.GITHUB_TOKEN }}" - required: true - bazel-args: - description: 'Additional Bazel arguments to use when building package under test' - required: true - default: '//...' 
-runs: - using: 'docker' - image: 'Dockerfile' - args: - - ${{ inputs.bazel-args }} diff --git a/.github/actions/bazel-ci-focal/entrypoint.sh b/.github/actions/bazel-ci-focal/entrypoint.sh deleted file mode 100755 index d43cecd..0000000 --- a/.github/actions/bazel-ci-focal/entrypoint.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh -l - -sudo bash /run.sh $@ diff --git a/.github/actions/bazel-ci-focal/run.sh b/.github/actions/bazel-ci-focal/run.sh deleted file mode 100755 index 58cf0ae..0000000 --- a/.github/actions/bazel-ci-focal/run.sh +++ /dev/null @@ -1,70 +0,0 @@ -#!/bin/sh -l - -set -x -set -e - -BAZEL_ARGS=$1 - -echo ::group::Install tools: apt -apt update 2>&1 -apt -y install \ - build-essential \ - cppcheck \ - curl \ - git \ - gnupg \ - lsb-release \ - python3-pip \ - wget - -cd "$GITHUB_WORKSPACE" -SYSTEM_VERSION=`lsb_release -cs` -SOURCE_DEPENDENCIES="`pwd`/.github/ci/dependencies.yaml" -SOURCE_DEPENDENCIES_VERSIONED="`pwd`/.github/ci-$SYSTEM_VERSION/dependencies.yaml" -echo ::endgroup:: - -echo ::group::Install tools: pip -pip3 install -U pip vcstool colcon-common-extensions -echo ::endgroup:: - -echo ::group::Install tools: bazel -curl https://bazel.build/bazel-release.pub.gpg | apt-key add - -echo "deb [arch=amd64] https://storage.googleapis.com/bazel-apt stable jdk1.8" | tee /etc/apt/sources.list.d/bazel.list - -apt update 2>&1 -apt -y install bazel -echo ::endgroup:: - -mkdir -p /ignition/bazel -cd /ignition/bazel -vcs import . < /github/workspace/.github/ci/bazel.repos - -cp -R /github/workspace ./ign_bazel - -echo ::group::Install dependencies from binaries -EXCLUDE_APT="libignition|libsdformat|libogre|dart" -UBUNTU_VERSION=`lsb_release -cs` -ALL_PACKAGES=$( \ - sort -u $(find . 
-iname 'packages-'$UBUNTU_VERSION'.apt' -o -iname 'packages.apt') | grep -Ev $EXCLUDE_APT | tr '\n' ' ') -apt-get install --no-install-recommends --quiet --yes $ALL_PACKAGES -echo ::endgroup:: - -ln -sf ./ign_bazel/example/WORKSPACE.example ./WORKSPACE -ln -sf ./ign_bazel/example/BUILD.example ./BUILD.bazel -ln -sf ./ign_bazel/example/bazelrc.example ./.bazelrc - -echo ::group::Bazel sync -bazel sync || { - status=$? - echo "Error during sync" -} -echo ::endgroup:: - -echo ::group::Bazel build -bazel build $BAZEL_ARGS -echo ::endgroup:: - -echo ::group::Bazel test -bazel test $BAZEL_ARGS -echo ::endgroup:: - diff --git a/.github/ci/bazel.repos b/.github/ci/bazel.repos deleted file mode 100644 index 0b1c38a..0000000 --- a/.github/ci/bazel.repos +++ /dev/null @@ -1,10 +0,0 @@ -repositories: - ign_math: - type: git - url: https://github.com/ignitionrobotics/ign-math - version: ign-math6 - sdformat: - type: git - url: https://github.com/osrf/sdformat - version: bazel-sdf10 - diff --git a/.github/ci/packages-focal.apt b/.github/ci/packages-focal.apt deleted file mode 100644 index d8ce994..0000000 --- a/.github/ci/packages-focal.apt +++ /dev/null @@ -1,2 +0,0 @@ -python-is-python3 -libnlopt-cxx-dev diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 54d703f..0000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: Ubuntu CI - -on: [push, pull_request] - -jobs: - focal-ci: - runs-on: ubuntu-latest - name: Ubuntu Focal CI - steps: - - name: Checkout - uses: actions/checkout@v2 - - - name: Login to GHCR - run: echo ${{ secrets.CR_PAT }} | docker login ghcr.io -u $GITHUB_ACTOR --password-stdin - - - name: Compile and test - uses: ./.github/actions/bazel-ci-focal - with: - bazel-args: //... 
- - bionic-ci: - runs-on: ubuntu-latest - name: Ubuntu BionicCI - steps: - - name: Checkout - uses: actions/checkout@v2 - - - name: Login to GHCR - run: echo ${{ secrets.CR_PAT }} | docker login ghcr.io -u $GITHUB_ACTOR --password-stdin - - - name: Compile and test - uses: ./.github/actions/bazel-ci-bionic - with: - bazel-args: //... - diff --git a/BUILD.bazel b/BUILD.bazel deleted file mode 100644 index 306a9b0..0000000 --- a/BUILD.bazel +++ /dev/null @@ -1,40 +0,0 @@ -load( - ":build_defs.bzl", - "IGNITION_FEATURES", - "IGNITION_ROOT", - "IGNITION_VISIBILITY", -) - -package( - default_visibility = IGNITION_VISIBILITY, - features = IGNITION_FEATURES, -) - -licenses(["notice"]) - -exports_files(["LICENSE"]) - -py_binary( - name = "cmake_configure_file", - srcs = ["cmake_configure_file.py"], - python_version = "PY3", - srcs_version = "PY2AND3", -) - -cc_library( - name = "utilities", - hdrs = [ - "ignition/utilities/ExtraTestMacros.hh", - "ignition/utilities/SuppressWarning.hh", - "ignition/utilities/detail/ExtraTestMacros.hh", - "ignition/utilities/detail/SuppressWarning.hh", - ], - includes = ["."], -) - -py_binary( - name = "embed_sdf", - srcs = ["embed_sdf.py"], - python_version = "PY3", - srcs_version = "PY2AND3", -) diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 261eeb9..0000000 --- a/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/README.md b/README.md deleted file mode 100644 index 18fdab0..0000000 --- a/README.md +++ /dev/null @@ -1,75 +0,0 @@ -# ign-bazel - -ignition-bazel provides a set of Bazel build rules that are used by the Ignition projects. - -## Building Ignition with Bazel - -Currently (for Ignition Dome), each Ignition package has a `bazel` branch to maintain the build rules necessary for that package. - -To build, we make the assumption that all necessary libraries and dependencies are checked out in a single directory. -For compatilbility, we use underscores (`_`) to delimit the names, rather than the typical dashes. - -The following instructions assume that you are using Ubuntu 18.04 - -### Installing Bazel - -It is best to use the [Bazel Instructions](https://docs.bazel.build/versions/master/install-ubuntu.html) to install for your platform. - -For Ubuntu 18.04: - -``` -sudo apt install curl gnupg -curl https://bazel.build/bazel-release.pub.gpg | sudo apt-key add - -echo "deb [arch=amd64] https://storage.googleapis.com/bazel-apt stable jdk1.8" | sudo tee /etc/apt/sources.list.d/bazel.list - -sudo apt update && sudo apt install bazel -``` - -### Setting up the workspace - -To begin with, make a directory to contain all of the Ignition packages and dependencies: - -``` -mkdir ~/ignition/ -cd ~/ignition/ -``` - -Then clone each of the Ignition packages. 
-For convenience, we have provided a `.repos` file that can be used by [vcstool](https://github.com/dirk-thomas/vcstool) - -``` -wget https://raw.githubusercontent.com/ignitionrobotics/ign-bazel/master/example/bazel.repos -vcs import . < bazel.repos -``` - -You can then install all necessary `apt` dependencies with the following: - -``` -sudo apt update -sudo apt install -y -qq --no-install-recommends \ - $(sort -u $(find . -iname 'packages-'$UBUNTU_VERSION'.apt' -o -iname 'packages.apt') | grep -Ev "libignition|libsdformat|ogre" | tr '\n' ' ') -``` - -Finally, it is necessary to add a few files for `bazel` - -``` -cd ~/ignition -ln -sf ./ign_bazel/example/WORKSPACE.example ./WORKSPACE -ln -sf ./ign_bazel/example/BUILD.example ./BUILD.bazel -ln -sf ./ign_bazel/example/bazelrc.example ./.bazelrc -``` - -### Building with Bazel - -Once the workspace is setup, building with `bazel` is straightforward. - -``` -cd ~/ignition -bazel build //... -``` - -To test: - -``` -bazel test //... -``` diff --git a/build_defs.bzl b/build_defs.bzl deleted file mode 100644 index 6b7e8b4..0000000 --- a/build_defs.bzl +++ /dev/null @@ -1,59 +0,0 @@ -""" -General variables and rules for facilitating builds of ignition. 
-""" - -load( - ":cmake_configure_file.bzl", - _cmake_configure_file = "cmake_configure_file", -) -load( - ":generate_include_header.bzl", - _generate_include_header = "generate_include_header", -) -load( - ":generate_file.bzl", - _generate_file = "generate_file", -) -load( - "ign_config_header.bzl", - _ign_config_header = "ign_config_header", -) -load( - "ign_export_header.bzl", - _ign_export_header = "ign_export_header", -) -load( - "generate_yaml.bzl", - _generate_yaml = "generate_yaml", -) -load( - "qt.bzl", - _qt_cc_binary = "qt_cc_binary", - _qt_cc_library = "qt_cc_library", -) - -cmake_configure_file = _cmake_configure_file -generate_include_header = _generate_include_header -generate_file = _generate_file -ign_config_header = _ign_config_header -ign_export_header = _ign_export_header -generate_yaml = _generate_yaml -qt_cc_binary = _qt_cc_binary -qt_cc_library = _qt_cc_library - -IGNITION_ROOT = "//" - -IGNITION_VISIBILITY = [ - "//:__subpackages__", - "//experimental:__subpackages__", -] - -IGNITION_FEATURES = [ - "-parse_headers", - "-use_header_modules", - "-layering_check", -] - -DEFAULT_COPTS = [ - "-fexceptions", -] diff --git a/cmake_configure_file.bzl b/cmake_configure_file.bzl deleted file mode 100644 index 53a28c1..0000000 --- a/cmake_configure_file.bzl +++ /dev/null @@ -1,86 +0,0 @@ -# -*- python -*- - -# Copied from the Drake project: -# https://github.com/RobotLocomotion/drake/blob/17423f8fb6f292b4af0b4cf3c6c0f157273af501/tools/workspace/cmake_configure_file.bzl - -# Defines the implementation actions to cmake_configure_file. 
-def _cmake_configure_file_impl(ctx): - arguments = [ - "--input", - ctx.file.src.path, - "--output", - ctx.outputs.out.path, - ] - for item in ctx.attr.defines: - arguments += ["-D" + item] - for item in ctx.attr.undefines: - arguments += ["-U" + item] - for item in ctx.files.cmakelists: - arguments += ["--cmakelists", item.path] - ctx.actions.run( - inputs = [ctx.file.src] + ctx.files.cmakelists, - outputs = [ctx.outputs.out], - arguments = arguments, - env = ctx.attr.env, - executable = ctx.executable.cmake_configure_file_py, - ) - return [] - -# Defines the rule to cmake_configure_file. -_cmake_configure_file_gen = rule( - attrs = { - "src": attr.label( - allow_single_file = True, - mandatory = True, - ), - "out": attr.output(mandatory = True), - "defines": attr.string_list(), - "undefines": attr.string_list(), - "cmakelists": attr.label_list(allow_files = True), - "cmake_configure_file_py": attr.label( - cfg = "host", - executable = True, - default = Label("//ign_bazel:cmake_configure_file"), - ), - "env": attr.string_dict( - mandatory = True, - allow_empty = True, - ), - }, - output_to_genfiles = True, - implementation = _cmake_configure_file_impl, -) - -def cmake_configure_file( - name, - src = None, - out = None, - defines = None, - undefines = None, - cmakelists = None, - **kwargs): - """Creates a rule to generate an out= file from a src= file, using CMake's - configure_file substitution semantics. This implementation is incomplete, - and may not produce the same result as CMake in all cases. - Definitions optionally can be passed in directly as defines= strings (with - the usual defines= convention of either a name-only "HAVE_FOO", or a - key-value "MYSCALAR=DOUBLE"). - Definitions optionally can be read from simple CMakeLists files that - contain statements of the form "set(FOO_MAJOR_VERSION 1)" and similar. - Variables that are known substitutions but which should be undefined can be - passed as undefines= strings. 
- See cmake_configure_file.py for our implementation of the configure_file - substitution rules. - The CMake documentation of the configure_file macro is: - https://cmake.org/cmake/help/latest/command/configure_file.html - """ - _cmake_configure_file_gen( - name = name, - src = src, - out = out, - defines = defines, - undefines = undefines, - cmakelists = cmakelists, - env = {}, - **kwargs - ) diff --git a/cmake_configure_file.py b/cmake_configure_file.py deleted file mode 100644 index 7a1d188..0000000 --- a/cmake_configure_file.py +++ /dev/null @@ -1,155 +0,0 @@ -"""A re-implementation of CMake's configure_file substitution semantics. This -implementation is incomplete, and may not produce the same result as CMake in -all (or even many) cases. -The CMake documentation of the configure_file macro is: -https://cmake.org/cmake/help/latest/command/configure_file.html - -Copied from the Drake project: -https://github.com/RobotLocomotion/drake/blob/17423f8fb6f292b4af0b4cf3c6c0f157273af501/tools/workspace/cmake_configure_file.py -""" - -import argparse -import os -import re -import sys - -from collections import OrderedDict - -# Looks like "#cmakedefine VAR ..." or "#cmakedefine01 VAR". -_cmakedefine = re.compile(r'^(\s*)#cmakedefine(01)? ([^ \r\n]+)(.*?)([\r\n]+)') - -# Looks like "@VAR@" or "${VAR}". -_varsubst = re.compile(r'^(.*?)(@[^ ]+?@|\$\{[^ ]+?\})(.*)([\r\n]*)') - - -# Transform a source code line per CMake's configure_file semantics. -# -# The 'definitions' provides values for CMake variables. The dict's keys are -# the variable names to substitute, and the dict's values are the values to -# substitute. (The values can be None, for known-but-undefined variable keys.) -# -# The configuration semantics are as follows: -# -# - An input line 'cmakedefine VAR' turns into '#define VAR VALUE' if and only -# if the 'definitions' dict has a non-None value VALUE for VAR, otherwise it -# turns into '/* #undef VAR */'. 
-# -# - An input line 'cmakedefine01 VAR' turns into '#define VAR 1' if and only if -# the 'definitions' dict has a non-None value for VAR, otherwise it turns -# into '#define VAR 0'. -# -# - An input line with a substitution '@VAR@' or '${VAR}' replaces the -# substitution token with the value in 'definitions' dict for that VAR, or -# else the empty string if the value is None. It is an error if there is no -# such key in the dict. -def _transform(line, definitions): - # Replace define statements. - match = _cmakedefine.match(line) - if match: - blank, maybe01, var, rest, newline = match.groups() - defined = definitions.get(var) is not None - if maybe01: - return blank + '#define ' + var + [' 0', ' 1'][defined] + newline - elif defined: - line = blank + '#define ' + var + rest + newline - else: - return blank + '/* #undef ' + var + ' */' + newline - - # Replace variable substitutions. - while True: - match = _varsubst.match(line) - if not match: - break - before, xvarx, after, newline = match.groups() - if xvarx[0] == '$': - assert len(xvarx) >= 4 - assert xvarx[1] == '{' - assert xvarx[-1] == '}' - var = xvarx[2:-1] - elif xvarx[0] == '@': - assert len(xvarx) >= 3 - assert xvarx[-1] == '@' - var = xvarx[1:-1] - assert len(var) > 0 - - if var not in definitions: - raise KeyError('Missing definition for ' + var) - value = definitions.get(var) - if value is None: - value = '' - line = before + value + after + newline - - return line - - -# Looks like "set(VAR value)". -_set_var = re.compile(r'^\s*set\s*\(\s*(.+)\s+(.+)\s*\)\s*$') - - -# From a line of CMakeLists.txt, return a set(...) key-value pair, if found. 
-def _extract_definition(line, prior_definitions): - match = _set_var.match(line) - if not match: - return dict() - var, value = match.groups() - try: - value = _transform(value, prior_definitions) - except KeyError: - return dict() - if value.startswith('"'): - assert value.endswith('"') - value = value[1:-1] - return {var: value} - - -# Load our definitions dict, given the command-line args: -# - A command-line '-Dfoo' will add ('foo', 1) to the result. -# - A command-line '-Dfoo=bar' will add ('foo', 'bar') to the result. -# - A command-line '-Ufoo' will add ('foo', None) to the result. -def _setup_definitions(args): - result = OrderedDict() - for item in args.defines: - if '=' in item: - key, value = item.split('=', 1) - result[key] = value - else: - result[item] = 1 - - for item in args.undefines: - result[item] = None - - for filename in args.cmakelists: - with open(filename, 'r') as cmakelist: - for line in cmakelist.readlines(): - definition = _extract_definition(line, result) - result.update(definition) - - return result - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('--input', metavar='FILE') - parser.add_argument('--output', metavar='FILE') - parser.add_argument( - '-D', metavar='NAME', dest='defines', action='append', default=[]) - parser.add_argument( - '-U', metavar='NAME', dest='undefines', action='append', default=[]) - parser.add_argument( - '--cmakelists', action='append', default=[]) - args = parser.parse_args() - if args.input is None or args.output is None: - parser.print_usage() - sys.exit(1) - definitions = _setup_definitions(args) - - with open(args.input, 'r') as input_file: - with open(args.output + '.tmp', 'w') as output_file: - for input_line in input_file.readlines(): - output_line = _transform(input_line, definitions) - output_file.write(output_line) - os.rename(args.output + '.tmp', args.output) - - -if __name__ == '__main__': - main() diff --git a/embed_sdf.py b/embed_sdf.py deleted file mode 100644 index 
1babe75..0000000 --- a/embed_sdf.py +++ /dev/null @@ -1,28 +0,0 @@ -# A re-implementation of upstream's sdf/embedSdf.rb tool in Python. -# Copied from the Drake project: -# https://github.com/RobotLocomotion/drake/blob/17423f8fb6f292b4af0b4cf3c6c0f157273af501/tools/workspace/sdformat/embed_sdf.py -import sys - -assert __name__ == '__main__' - -print(""" -#include "src/EmbeddedSdf.hh" -namespace sdf { inline namespace SDF_VERSION_NAMESPACE { -const std::map& GetEmbeddedSdf() { - static const std::map result{ -""") -for filename in sorted(sys.argv[1:]): - _, relative_path = filename.split('/sdf/') - print('{') - print(f'"{relative_path}",') - with open(filename, 'r', encoding='utf-8') as data: - print('R"raw(') - sys.stdout.flush() - sys.stdout.buffer.write(data.read().encode('utf-8')) - print(')raw"') - print('},') -print(""" - }; - return result; -}}} -""") diff --git a/example/0001-Add-ign-utils.patch b/example/0001-Add-ign-utils.patch deleted file mode 100644 index beb33a9..0000000 --- a/example/0001-Add-ign-utils.patch +++ /dev/null @@ -1,279 +0,0 @@ -From e79de9ed7b60fd6f648d936d38fd90059d43fa32 Mon Sep 17 00:00:00 2001 -From: Michael Carroll -Date: Tue, 9 Mar 2021 11:14:57 -0600 -Subject: [PATCH] Add ign-utils - -Signed-off-by: Michael Carroll ---- - .../ci/{packages.apt => packages-focal.apt} | 0 - build_defs.bzl | 4 +- - example/Dockerfile | 12 +-- - example/bazel.repos | 10 +- - example/build_workspace.sh | 10 +- - example/setup_workspace.sh | 11 ++- - qt.bzl | 94 ++++++++----------- - 7 files changed, 67 insertions(+), 74 deletions(-) - rename .github/ci/{packages.apt => packages-focal.apt} (100%) - -diff --git a/.github/ci/packages.apt b/.github/ci/packages-focal.apt -similarity index 100% -rename from .github/ci/packages.apt -rename to .github/ci/packages-focal.apt -diff --git a/build_defs.bzl b/build_defs.bzl -index 6b7e8b4..893f359 100644 ---- a/build_defs.bzl -+++ b/build_defs.bzl -@@ -28,7 +28,7 @@ load( - ) - load( - "qt.bzl", -- _qt_cc_binary = 
"qt_cc_binary", -+ _qt_rcc = "qt_rcc", - _qt_cc_library = "qt_cc_library", - ) - -@@ -38,7 +38,7 @@ generate_file = _generate_file - ign_config_header = _ign_config_header - ign_export_header = _ign_export_header - generate_yaml = _generate_yaml --qt_cc_binary = _qt_cc_binary -+qt_rcc = _qt_rcc - qt_cc_library = _qt_cc_library - - IGNITION_ROOT = "//" -diff --git a/example/Dockerfile b/example/Dockerfile -index ec425df..bc10c22 100644 ---- a/example/Dockerfile -+++ b/example/Dockerfile -@@ -9,12 +9,11 @@ RUN apt-get update -qq \ - wget \ - python3-pip \ - python-pip \ -- lsb-release \ -- && apt-get clean -qq -+ lsb-release - --RUN sh -c 'echo "deb http://packages.osrfoundation.org/gazebo/ubuntu-stable `lsb_release -cs` main" > /etc/apt/sources.list.d/gazebo-stable.list' --RUN wget http://packages.osrfoundation.org/gazebo.key -O - | sudo apt-key add - --RUN apt-get update -+# RUN sh -c 'echo "deb http://packages.osrfoundation.org/gazebo/ubuntu-stable `lsb_release -cs` main" > /etc/apt/sources.list.d/gazebo-stable.list' -+# RUN wget http://packages.osrfoundation.org/gazebo.key -O - | sudo apt-key add - -+# RUN apt-get update - - # Common tools - RUN apt-get update -qq \ -@@ -42,9 +41,6 @@ RUN apt-get update -qq \ - libnlopt-dev \ - libfcl-dev \ - libopenscenegraph-dev \ -- libdart6-collision-ode-dev \ -- libdart6-dev \ -- libdart6-utils-urdf-dev \ - libfreetype6-dev \ - libgts-dev \ - libavcodec-dev \ -diff --git a/example/bazel.repos b/example/bazel.repos -index 8bbf041..2361236 100644 ---- a/example/bazel.repos -+++ b/example/bazel.repos -@@ -2,11 +2,11 @@ repositories: - ign_bazel: - type: git - url: https://github.com/ignitionrobotics/ign-bazel -- version: master -+ version: focal_updates - ign_common: - type: git - url: https://github.com/ignitionrobotics/ign-common -- version: bazel-common3 -+ version: ign-common3 - ign_fuel_tools: - type: git - url: https://github.com/ignitionrobotics/ign-fuel-tools -@@ -55,10 +55,14 @@ repositories: - type: git - url: 
https://github.com/ignitionrobotics/ign-transport - version: bazel-transport9 -+ ign_utils: -+ type: git -+ url: https://github.com/ignitionrobotics/ign-utils -+ version: main - dart: - type: git - url: https://github.com/ignition-forks/dart -- version: azeey/friction_per_shape_more_params -+ version: release-6.10 - ogre2: - type: git - url: https://github.com/ignition-forks/ogre-2.1-release -diff --git a/example/build_workspace.sh b/example/build_workspace.sh -index 4e5bc40..c1d3ad7 100755 ---- a/example/build_workspace.sh -+++ b/example/build_workspace.sh -@@ -5,9 +5,13 @@ set -o verbose - - vcs pull - --apt-get update --apt-get install -y -qq --no-install-recommends \ -- $(sort -u $(find . -iname 'packages.apt') | tr '\n' ' ') -+EXCLUDE_APT="libignition|libsdformat|libogre" -+UBUNTU_VERSION=`lsb_release -cs` -+ALL_PACKAGES=$( \ -+ sort -u $(find . -iname 'packages-'$UBUNTU_VERSION'.apt' -o -iname 'packages.apt') | grep -Ev $EXCLUDE_APT | tr '\n' ' ') -+ -+DEBIAN_FRONTEND=noninteractive \ -+apt-get update && apt-get install --no-install-recommends --quiet --yes $ALL_PACKAGES - apt-get clean -qq - - bazel build //... -diff --git a/example/setup_workspace.sh b/example/setup_workspace.sh -index 80c7b24..32f8c22 100755 ---- a/example/setup_workspace.sh -+++ b/example/setup_workspace.sh -@@ -5,6 +5,11 @@ set -o verbose - - vcs import . < bazel.repos - --apt-get update --apt-get install -y -qq --no-install-recommends \ -- $(sort -u $(find . -iname 'packages.apt') | tr '\n' ' ') -+EXCLUDE_APT="libignition|libsdformat|libogre" -+UBUNTU_VERSION=`lsb_release -cs` -+ALL_PACKAGES=$( \ -+ sort -u $(find . 
-iname 'packages-'$UBUNTU_VERSION'.apt' -o -iname 'packages.apt') | grep -Ev $EXCLUDE_APT | tr '\n' ' ') -+ -+DEBIAN_FRONTEND=noninteractive \ -+apt-get update && apt-get install --no-install-recommends --quiet --yes $ALL_PACKAGES -+apt-get clean -qq -diff --git a/qt.bzl b/qt.bzl -index 0ec734c..b996428 100644 ---- a/qt.bzl -+++ b/qt.bzl -@@ -18,58 +18,45 @@ - # Copied from - # https://github.com/justbuchanan/bazel_rules_qt - --load("@rules_cc//cc:defs.bzl", "cc_binary") -+load("@rules_cc//cc:defs.bzl", "cc_library") - --def qt_moc(hdrs): -- _moc_srcs = [] -- for hdr in hdrs: -- header_path = "%s" % (hdr.replace("//", "").replace(":", "/")) if len(native.package_name()) > 0 else hdr -- moc_name = "%s_moc" % hdr.replace(".", "_").replace("//", "").replace("/", "_").replace(":", "_") -- native.genrule( -- name = moc_name, -- srcs = [hdr], -- outs = [moc_name + ".cc"], -- cmd = "qtchooser -qt=5 -run-tool=moc $(location %s) -o $@ -f'%s'" % -- (hdr, header_path), -- ) -- _moc_srcs.append(moc_name) -- return _moc_srcs -- --def qt_cc_binary(name, srcs, hdrs, linkopts, normal_hdrs = [], deps = None, **kwargs): -- """Compiles a QT library and generates the MOC for it. -- If a UI file is provided, then it is also compiled with UIC. -- Args: -- name: A name for the rule. -- srcs: The cpp files to compile. -- hdrs: The header files that the MOC compiles to src. -- normal_hdrs: Headers which are not sources for generated code. -- deps: cc_library dependencies for the library. -- kwargs: Any additional arguments are passed to the cc_library rule. 
-- """ -+def _qt_rcc(ctx): -+ # Symlink QRC file -+ qrc_file = (ctx.file.qrc, ctx.actions.declare_file(ctx.file.qrc.path)) -+ ctx.actions.symlink( -+ output = qrc_file[1], -+ target_file = qrc_file[0], -+ ) - -- _moc_srcs = [] -- for hdr in hdrs: -- header_path = "%s" % (hdr.replace("//", "").replace(":", "/")) if len(native.package_name()) > 0 else hdr -- moc_name = "%s_moc" % hdr.replace(".", "_").replace("//", "").replace("/", "_").replace(":", "_") -- native.genrule( -- name = moc_name, -- srcs = [hdr], -- outs = [moc_name + ".cc"], -- cmd = "qtchooser -qt=5 -run-tool=moc $(location %s) -o $@ -f'%s'" % -- (hdr, header_path), -+ # Symlink resources -+ resource_files = [(f, ctx.actions.declare_file(f.path)) for f in ctx.files.files] -+ for target_file, output in resource_files: -+ ctx.actions.symlink( -+ output = output, -+ target_file = target_file, - ) -- _moc_srcs.append(":" + moc_name) -- cc_binary( -- name = name, -- srcs = srcs + _moc_srcs + hdrs + normal_hdrs, -- linkopts = linkopts, -- deps = deps, -- **kwargs -+ -+ args = ["--name", ctx.attr.resource_name, "--output", ctx.outputs.cpp.path, qrc_file[1].path] -+ ctx.actions.run( -+ inputs = [resource for _, resource in resource_files] + [qrc_file[1]], -+ outputs = [ctx.outputs.cpp], -+ arguments = args, -+ executable = "rcc", - ) -+ return [OutputGroupInfo(cpp = depset([ctx.outputs.cpp]))] -+ -+qt_rcc = rule( -+ implementation = _qt_rcc, -+ attrs = { -+ "resource_name": attr.string(), -+ "files": attr.label_list(allow_files = True, mandatory = False), -+ "qrc": attr.label(allow_single_file = True, mandatory = True), -+ "cpp": attr.output(), -+ }, -+) - --def qt_cc_library(name, srcs, hdrs, linkopts, normal_hdrs = [], deps = None, **kwargs): -+def qt_cc_library(name, srcs, hdrs, normal_hdrs = [], deps = None, **kwargs): - """Compiles a QT library and generates the MOC for it. -- If a UI file is provided, then it is also compiled with UIC. - Args: - name: A name for the rule. 
- srcs: The cpp files to compile. -@@ -78,25 +65,22 @@ def qt_cc_library(name, srcs, hdrs, linkopts, normal_hdrs = [], deps = None, **k - deps: cc_library dependencies for the library. - kwargs: Any additional arguments are passed to the cc_library rule. - """ -- - _moc_srcs = [] - for hdr in hdrs: -- header_path = "%s" % (hdr.replace("//", "").replace(":", "/")) if len(native.package_name()) > 0 else hdr -- moc_name = "%s_moc" % hdr.replace(".", "_").replace("//", "").replace("/", "_").replace(":", "_") -- header_path = "/".join(header_path.split("/")[1:]) -- -+ header_path = "%s/%s" % (native.package_name(), hdr) if len(native.package_name()) > 0 else hdr -+ moc_name = "%s_moc" % hdr.replace(".", "_") - native.genrule( - name = moc_name, - srcs = [hdr], - outs = [moc_name + ".cc"], -- cmd = "qtchooser -qt=5 -run-tool=moc $(location %s) -o $@ -f'%s'" % -+ cmd = "moc $(location %s) -o $@ -f'%s'" % - (hdr, header_path), - ) - _moc_srcs.append(":" + moc_name) -- native.cc_library( -+ cc_library( - name = name, -- srcs = srcs + _moc_srcs + hdrs + normal_hdrs, -- linkopts = linkopts, -+ srcs = srcs + _moc_srcs, -+ hdrs = hdrs + normal_hdrs, - deps = deps, - **kwargs - ) --- -2.25.1 - diff --git a/example/BUILD.example b/example/BUILD.example deleted file mode 100644 index 12b3273..0000000 --- a/example/BUILD.example +++ /dev/null @@ -1,14 +0,0 @@ -licenses(["notice"]) - -exports_files(["LICENSE"]) - -load( - "//ign_bazel:build_defs.bzl", - "IGNITION_FEATURES", - "IGNITION_VISIBILITY", -) - -package( - default_visibility = IGNITION_VISIBILITY, - features = IGNITION_FEATURES, -) diff --git a/example/Dockerfile b/example/Dockerfile deleted file mode 100644 index c9a47a3..0000000 --- a/example/Dockerfile +++ /dev/null @@ -1,96 +0,0 @@ -FROM ubuntu:bionic - -RUN apt-get update -qq \ - && apt-get install -y -qq \ - build-essential \ - curl \ - git \ - sudo \ - wget \ - python3-pip \ - python-pip \ - lsb-release \ - && apt-get clean -qq - -RUN sh -c 'echo "deb 
http://packages.osrfoundation.org/gazebo/ubuntu-stable `lsb_release -cs` main" > /etc/apt/sources.list.d/gazebo-stable.list' -RUN wget http://packages.osrfoundation.org/gazebo.key -O - | sudo apt-key add - -RUN apt-get update - -# Common tools -RUN apt-get update -qq \ - && apt-get install -y -qq \ - libprotoc-dev \ - libprotobuf-dev \ - protobuf-compiler \ - uuid-dev \ - libzmq3-dev \ - libsqlite3-dev \ - g++-8 \ - libzip-dev \ - libjsoncpp-dev \ - libglew-dev \ - libfreeimage-dev \ - freeglut3-dev \ - libxmu-dev \ - libxi-dev \ - libyaml-dev \ - libwebsockets-dev \ - libswscale-dev \ - libcurl4-openssl-dev \ - libode-dev \ - libassimp-dev \ - libnlopt-dev \ - libfcl-dev \ - libopenscenegraph-dev \ - libfreetype6-dev \ - libgts-dev \ - libavcodec-dev \ - libavformat-dev \ - libavdevice-dev \ - libxaw7-dev \ - libxrandr-dev \ - ruby \ - ruby-ronn \ - ruby-dev \ - qml-module-qt-labs-folderlistmodel \ - qml-module-qt-labs-settings \ - qml-module-qtquick2 \ - qml-module-qtquick-controls \ - qml-module-qtquick-controls2 \ - qml-module-qtquick-dialogs \ - qml-module-qtquick-layouts \ - qml-module-qtqml-models2 \ - qtbase5-dev \ - qtdeclarative5-dev \ - qtquickcontrols2-5-dev \ - && apt-get clean -qq - -RUN pip3 install vcstool -RUN pip install psutil -RUN sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-8 800 \ - --slave /usr/bin/g++ g++ /usr/bin/g++-8 --slave /usr/bin/gcov gcov /usr/bin/gcov-8 - -# Add Bazel PPA -RUN /bin/sh -c 'echo "deb [arch=amd64] https://storage.googleapis.com/bazel-apt stable jdk1.8" > /etc/apt/sources.list.d/bazel-stable.list \ - && curl https://bazel.build/bazel-release.pub.gpg | apt-key add -' - -# Install Bazel -RUN apt-get update -qq \ - && apt-get install -y -qq \ - bazel \ - && apt-get clean -qq - -RUN mkdir /ignition -WORKDIR /ignition - -ADD WORKSPACE.example /ignition/WORKSPACE -ADD BUILD.example /ignition/BUILD.bazel -ADD bazelrc.example /ignition/.bazelrc - -ADD bazel.repos /ignition/bazel.repos - -ADD 
setup_workspace.sh /ignition/setup_workspace.sh -RUN /ignition/setup_workspace.sh - -ADD build_workspace.sh /ignition/build_workspace.sh -CMD /ignition/build_workspace.sh diff --git a/example/README.md b/example/README.md deleted file mode 100644 index af96a77..0000000 --- a/example/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# Build Ignition packages with Bazel (and Docker) - - -``` -# Clone this repository somewhere on your computer: -git clone https://github.com/ignitionrobotics/ign-bazel.git -# Change to the example directory -cd ign-bazel/example -# Build the docker image -docker build -t ign-bazel:latest . -# Build ignition using bazel in Docker -docker run ign-bazel:latest -``` diff --git a/example/WORKSPACE.example b/example/WORKSPACE.example deleted file mode 100644 index f51f6e0..0000000 --- a/example/WORKSPACE.example +++ /dev/null @@ -1,36 +0,0 @@ -load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") -load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository") - -load("//ign_bazel:repositories.bzl", "ignition_repositories") -load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository") - -ignition_repositories() - -load( - "@rules_proto//proto:repositories.bzl", - "rules_proto_dependencies", - "rules_proto_toolchains" -) - -rules_proto_dependencies() -rules_proto_toolchains() - -git_repository( - name = "bazelruby_rules_ruby", - remote = "https://github.com/bazelruby/rules_ruby.git", - commit = "6c025e38f0f030b75df2e321740eb31df575c391", - shallow_since = "1596606275 -0700" -) - -load( - "@bazelruby_rules_ruby//ruby:deps.bzl", - "rules_ruby_dependencies", - "rules_ruby_select_sdk", -) - -rules_ruby_dependencies() - -load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace") -bazel_skylib_workspace() - -rules_ruby_select_sdk(version = "2.7.1") diff --git a/example/bazel.repos b/example/bazel.repos deleted file mode 100644 index 8168318..0000000 --- a/example/bazel.repos +++ /dev/null @@ -1,73 +0,0 @@ -repositories: - 
ign_bazel: - type: git - url: https://github.com/ignitionrobotics/ign-bazel - version: master - ign_common: - type: git - url: https://github.com/ignitionrobotics/ign-common - version: ign-common3 - ign_fuel_tools: - type: git - url: https://github.com/ignitionrobotics/ign-fuel-tools - version: bazel-fuel-tools5 - ign_gazebo: - type: git - url: https://github.com/ignitionrobotics/ign-gazebo - version: bazel-gazebo4 - ign_gui: - type: git - url: https://github.com/ignitionrobotics/ign-gui - version: bazel-gui4 - ign_launch: - type: git - url: https://github.com/ignitionrobotics/ign-launch - version: bazel-launch3 - ign_math: - type: git - url: https://github.com/ignitionrobotics/ign-math - version: ign-math6 - ign_msgs: - type: git - url: https://github.com/ignitionrobotics/ign-msgs - version: ign-msgs6 - ign_physics: - type: git - url: https://github.com/ignitionrobotics/ign-physics - version: bazel-physics3 - ign_plugin: - type: git - url: https://github.com/ignitionrobotics/ign-plugin - version: bazel-plugin1 - ign_rendering: - type: git - url: https://github.com/ignitionrobotics/ign-rendering - version: bazel-rendering4 - ign_sensors: - type: git - url: https://github.com/ignitionrobotics/ign-sensors - version: bazel-sensors4 - ign_tools: - type: git - url: https://github.com/ignitionrobotics/ign-tools - version: bazel-tools1 - ign_transport: - type: git - url: https://github.com/ignitionrobotics/ign-transport - version: bazel-transport9 - ign_utils: - type: git - url: https://github.com/ignitionrobotics/ign-utils - version: main - dart: - type: git - url: https://github.com/ignition-forks/dart - version: release-6.10 - ogre2: - type: git - url: https://github.com/ignition-forks/ogre-2.1-release - version: master - sdformat: - type: git - url: https://github.com/osrf/sdformat - version: bazel-sdf10 diff --git a/example/bazelrc.example b/example/bazelrc.example deleted file mode 100644 index e178546..0000000 --- a/example/bazelrc.example +++ /dev/null @@ -1,6 
+0,0 @@ -build --cxxopt=--std=c++17 -build --define enable_ifaddrs=true -build --test_env=DISPLAY -test --define enable_ifaddrs=true -test --test_env=DISPLAY -#build --compilation_mode=dbg diff --git a/example/build_workspace.sh b/example/build_workspace.sh deleted file mode 100755 index 81f56ed..0000000 --- a/example/build_workspace.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/sh - -set -o errexit -set -o verbose - -vcs pull - -EXCLUDE_APT="libignition|libsdformat|libogre|dart" -UBUNTU_VERSION=`lsb_release -cs` -ALL_PACKAGES=$( \ - sort -u $(find . -iname 'packages-'$UBUNTU_VERSION'.apt' -o -iname 'packages.apt') | grep -Ev $EXCLUDE_APT | tr '\n' ' ') - -DEBIAN_FRONTEND=noninteractive \ -apt-get update && apt-get install --no-install-recommends --quiet --yes $ALL_PACKAGES - - -bazel build //... -bazel test //... diff --git a/example/setup_workspace.sh b/example/setup_workspace.sh deleted file mode 100755 index 4752113..0000000 --- a/example/setup_workspace.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh - -set -o errexit -set -o verbose - -vcs import . < bazel.repos - -EXCLUDE_APT="libignition|libsdformat|libogre|dart" -UBUNTU_VERSION=`lsb_release -cs` -ALL_PACKAGES=$( \ - sort -u $(find . 
-iname 'packages-'$UBUNTU_VERSION'.apt' -o -iname 'packages.apt') | grep -Ev $EXCLUDE_APT | tr '\n' ' ') - -DEBIAN_FRONTEND=noninteractive \ -apt-get update && apt-get install --no-install-recommends --quiet --yes $ALL_PACKAGES diff --git a/generate_file.bzl b/generate_file.bzl deleted file mode 100644 index bcc1690..0000000 --- a/generate_file.bzl +++ /dev/null @@ -1,28 +0,0 @@ -# -*- python -*- - -# Copied from the Drake project -# https://github.com/RobotLocomotion/drake/blob/17423f8fb6f292b4af0b4cf3c6c0f157273af501/tools/workspace/generate_file.bzl - -def _generate_file_impl(ctx): - out = ctx.actions.declare_file(ctx.label.name) - ctx.actions.write(out, ctx.attr.content, ctx.attr.is_executable) - return [DefaultInfo( - files = depset([out]), - data_runfiles = ctx.runfiles(files = [out]), - )] - -generate_file = rule( - attrs = { - "content": attr.string(mandatory = True), - "is_executable": attr.bool(default = False), - }, - output_to_genfiles = True, - implementation = _generate_file_impl, -) - -"""Generate a file with specified content. -This creates a rule to generate a file with specified content (which is either -static or has been previously computed). -Args: - content (:obj:`str`): Desired content of the generated file. 
-""" diff --git a/generate_include_header.bzl b/generate_include_header.bzl deleted file mode 100644 index ad7c2b1..0000000 --- a/generate_include_header.bzl +++ /dev/null @@ -1,41 +0,0 @@ -# -*- python -*- - -# Copied from the drake project -# https://github.com/RobotLocomotion/drake/blob/17423f8fb6f292b4af0b4cf3c6c0f157273af501/tools/workspace/generate_include_header.bzl - -load(":pathutils.bzl", "output_path") - -# Generate a header that includes a set of other headers -def _generate_include_header_impl(ctx): - # Collect list of headers - hdrs = [] - for h in ctx.attr.hdrs: - for f in h.files.to_list(): - hdrs.append(output_path(ctx, f, ctx.attr.strip_prefix)) - - # Generate include header - content = "#pragma once\n" - content = content + "\n".join(["#include \"%s\"" % h for h in hdrs]) - ctx.actions.write(output = ctx.outputs.out, content = content) - -generate_include_header = rule( - attrs = { - "hdrs": attr.label_list(allow_files = True), - "strip_prefix": attr.string_list(default = ["**/include/"]), - "out": attr.output(mandatory = True), - }, - output_to_genfiles = True, - implementation = _generate_include_header_impl, -) -"""Generate a header that includes a set of other headers. - -This creates a rule to generate a header that includes a list of other headers. -The generated file will be of the form:: - #include "hdr" - #include "hdr" -Args: - hdrs (:obj:`str`): List of files or file labels of headers that the - generated header will include. - strip_prefix (:obj:`list` of :obj:`str`): List of prefixes to strip from - the header names when forming the ``#include`` directives. 
-""" diff --git a/generate_yaml.bzl b/generate_yaml.bzl deleted file mode 100644 index 91f5480..0000000 --- a/generate_yaml.bzl +++ /dev/null @@ -1,24 +0,0 @@ -def _generate_yaml_impl(ctx): - ctx.actions.run_shell( - inputs = [ctx.file.ruby_target], - outputs = [ctx.outputs.source_file], - command = "\n".join([ - "echo format: 1.0.0>> %s" % (ctx.outputs.source_file.path), - "echo library_name: \"%s\">> %s" % (ctx.attr.library_name, ctx.outputs.source_file.path), - "echo library_version: \"%s\">> %s" % (ctx.attr.library_version, ctx.outputs.source_file.path), - "echo library_path: $(realpath \"%s\") >> %s" % (ctx.file.ruby_target.path, ctx.outputs.source_file.path), - "echo commands:>> %s" % (ctx.outputs.source_file.path), - "echo \"%s\">> %s" % (ctx.attr.commands, ctx.outputs.source_file.path), - ]), - ) - -generate_yaml = rule( - implementation = _generate_yaml_impl, - attrs = { - "library_name": attr.string(mandatory = True), - "library_version": attr.string(mandatory = True), - "commands": attr.string(mandatory = True), - "ruby_target": attr.label(allow_single_file = True), - }, - outputs = {"source_file": "%{name}.yaml"}, -) diff --git a/ign_config_header.bzl b/ign_config_header.bzl deleted file mode 100644 index 711bd80..0000000 --- a/ign_config_header.bzl +++ /dev/null @@ -1,47 +0,0 @@ -load( - ":cmake_configure_file.bzl", - "cmake_configure_file", -) - -def ign_config_header(name, src, cmakelists, project_name, project_version, extra_defines = None, **kwargs): - out = src - idx = out.find(".in") - if (idx > 0): - out = out[0:idx] - - PROJECT_NAME = project_name - IGN_DESIGNATION = project_name.split("-")[1] - PROJECT_MAJOR = project_version[0] - PROJECT_MINOR = project_version[1] - PROJECT_PATCH = project_version[2] - - defines = [ - "PROJECT_VERSION_MAJOR=%d" % PROJECT_MAJOR, - "PROJECT_VERSION_MINOR=%d" % PROJECT_MINOR, - "PROJECT_VERSION_PATCH=%d" % PROJECT_PATCH, - "PROJECT_MAJOR_VERSION=%d" % PROJECT_MAJOR, - "PROJECT_MINOR_VERSION=%d" % PROJECT_MINOR, 
- "PROJECT_PATCH_VERSION=%d" % PROJECT_PATCH, - "PROJECT_VERSION=%d.%d" % (PROJECT_MAJOR, PROJECT_MINOR), - "PROJECT_VERSION_FULL=%d.%d.%d" % (PROJECT_MAJOR, PROJECT_MINOR, PROJECT_PATCH), # noqa - "PROJECT_NAME=%s" % PROJECT_NAME, - "PROJECT_NAME_NO_VERSION=%s" % PROJECT_NAME, - "IGN_DESIGNATION=%s" % IGN_DESIGNATION, - "IGN_DESIGNATION_UPPER=%s" % IGN_DESIGNATION.upper(), - "IGN_DESIGNATION_LOWER=%s" % IGN_DESIGNATION.lower(), - "PROJECT_BINARY_DIR=", - "PROJECT_SOURCE_DIR=", - ] - - if extra_defines != None: - defines = defines + extra_defines - - cmake_configure_file( - name = name, - src = src, - out = out, - cmakelists = cmakelists, - defines = defines, - visibility = ["//visibility:private"], - **kwargs - ) diff --git a/ign_export_header.bzl b/ign_export_header.bzl deleted file mode 100644 index ea87e6f..0000000 --- a/ign_export_header.bzl +++ /dev/null @@ -1,69 +0,0 @@ -load( - ":generate_file.bzl", - "generate_file", -) - -def ign_export_header(name, lib_name, export_base, visibility, **kwargs): - generate_file( - name = name, - visibility = visibility, - content = """ -/* - * Copyright (C) 2017 Open Source Robotics Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * -*/ - -/* - * ========================================================================== - * This file was automatically generated by Bazel; do not modify it directly. 
- * To modify this file, make changes to ign-bazel/ign_export_header.bzl - * ========================================================================== -*/ - -#ifndef {export_base}_EXPORT_HH_ -#define {export_base}_EXPORT_HH_ - -#ifndef {export_base}_VISIBLE -/// For {lib_name} developers: Apply this macro to {lib_name} -/// functions and classes which consumers of this library will need to be able -/// to call from their own programs or libraries. -#define {export_base}_VISIBLE \ - __attribute__ ((visibility("default"))) -#endif - - -#ifndef {export_base}_HIDDEN -/// For {lib_name} developers: Apply this macro to {lib_name} -/// functions and classes which must not be used by consumers of this library. -/// By default, this property is applied to all classes and functions which are -/// not tagged with {export_base}_VISIBLE, so this does not -/// generally need to be used. -#define {export_base}_HIDDEN \ - __attribute__ ((visibility("hidden"))) -#endif - - -#ifndef IGN_DEPRECATED -/// For {lib_name} developers: Use this macro to indicate that a -/// function or class has been deprecated and should no longer be used. A -/// version should be specified to provide context to the user about when the -/// function became deprecated. -#define IGN_DEPRECATED(version) __attribute__ ((__deprecated__)) -#endif - -#endif -""".format(lib_name = lib_name, export_base = export_base), - **kwargs - ) diff --git a/ignition/utilities/ExtraTestMacros.hh b/ignition/utilities/ExtraTestMacros.hh deleted file mode 100644 index 5bd800d..0000000 --- a/ignition/utilities/ExtraTestMacros.hh +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (C) 2020 Open Source Robotics Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -#ifndef IGNITION_UTILITIES_EXTRATESTMACROS_HH -#define IGNITION_UTILITIES_EXTRATESTMACROS_HH - -#include - -/// \brief Restrict the execution of the test for the Windows platform. -/// The test will be compiled on Windows too but will never be run as -/// part of the test suite. The macro uses the Disabled_ prefix provided -/// by googletest. See -/// https://chromium.googlesource.com/external/github.com/google/googletest/+/HEAD/googletest/docs/advanced.md -#define IGN_UTILS_TEST_DISABLED_ON_WIN32(TestName) \ - DETAIL_IGN_UTILS_TEST_DISABLED_ON_WIN32(TestName) - -/// \brief Restrict the execution of the test for the Mac platform. -/// The test will be compiled on Windows too but will never be run as -/// part of the test suite. The macro uses the Disabled_ prefix provided -/// by googletest. See -/// https://chromium.googlesource.com/external/github.com/google/googletest/+/HEAD/googletest/docs/advanced.md -#define IGN_UTILS_TEST_DISABLED_ON_MAC(TestName) \ - DETAIL_IGN_UTILS_TEST_DISABLED_ON_MAC(TestName) - -/// \brief Restrict the execution of the test to just the Linux platform -/// Other platforms will get the test compiled but it won't be run -/// as part of the test suite execution. -/// The macro uses the Disabled_ prefix provided by googletest. 
See -/// https://chromium.googlesource.com/external/github.com/google/googletest/+/HEAD/googletest/docs/advanced.md -#define IGN_UTILS_TEST_ENABLED_ONLY_ON_LINUX(TestName) \ - DETAIL_IGN_UTILS_TEST_ENABLED_ONLY_ON_LINUX(TestName) - -#endif diff --git a/ignition/utilities/SuppressWarning.hh b/ignition/utilities/SuppressWarning.hh deleted file mode 100644 index 7ae99ec..0000000 --- a/ignition/utilities/SuppressWarning.hh +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (C) 2018 Open Source Robotics Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -#ifndef IGNITION_UTILITIES_SUPPRESSWARNING_HH_ -#define IGNITION_UTILITIES_SUPPRESSWARNING_HH_ - -#include - -// This header contains cross-platform macros for suppressing warnings. Please -// only use these macros responsibly when you are certain that the compiler is -// producing a warning that is not applicable to the specific instance. Do not -// use these macros to ignore legitimate warnings, even if you may find them -// irritating. - -/* - * Usage example: - * - * SomeClass *ptr = CreatePtr(); - * IGN_UTILS_WARN_IGNORE__NON_VIRTUAL_DESTRUCTOR - * delete ptr; - * IGN_UTILS_WARN_RESUME__NON_VIRTUAL_DESTRUCTOR - * - */ - -// Be sure to call the IGN_UTILS_WARN_RESUME__XXXXX macro at the end of the -// block of code where the warning suppression is needed. Otherwise, you might -// inadvertently suppress legitimate warnings. 
- -// ---- List of available suppressions ---- - -/// \brief Compilers might warn about deleting a pointer to a class that has -/// virtual functions without a virtual destructor or a `final` declaration, -/// because the pointer might secretly be pointing to a more derived class type. -/// We want to suppress this warning when we know for certain (via the design -/// of our implementation) that the pointer is definitely not pointing to a more -/// derived type. -#define IGN_UTILS_WARN_IGNORE__NON_VIRTUAL_DESTRUCTOR \ - DETAIL_IGN_UTILS_WARN_IGNORE__NON_VIRTUAL_DESTRUCTOR - -#define IGN_UTILS_WARN_RESUME__NON_VIRTUAL_DESTRUCTOR \ - DETAIL_IGN_UTILS_WARN_RESUME__NON_VIRTUAL_DESTRUCTOR - -/// \brief Microsoft Visual Studio does not automatically export the interface -/// information for member variables that belong to interface classes of a DLL. -/// Instead it issues this warning. When the member variable is private, we -/// choose to suppress the warning instead of needlessly adding the class -/// information to the DLL interface. -#define IGN_UTILS_WARN_IGNORE__DLL_INTERFACE_MISSING \ - DETAIL_IGN_UTILS_WARN_IGNORE__DLL_INTERFACE_MISSING - -#define IGN_UTILS_WARN_RESUME__DLL_INTERFACE_MISSING \ - DETAIL_IGN_UTILS_WARN_RESUME__DLL_INTERFACE_MISSING - -// TODO(anyone): Add more warning types as they become relevant. -// Do not add warning types to suppress unless they are genuinely necessary. - -#endif diff --git a/ignition/utilities/detail/ExtraTestMacros.hh b/ignition/utilities/detail/ExtraTestMacros.hh deleted file mode 100644 index e9b780f..0000000 --- a/ignition/utilities/detail/ExtraTestMacros.hh +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (C) 2020 Open Source Robotics Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -#ifndef IGNITION_UTILITIES_DETAIL_EXTRATESTMACROS_HH -#define IGNITION_UTILITIES_DETAIL_EXTRATESTMACROS_HH - -#include - -#define DETAIL_IGN_UTILS_ADD_DISABLED_PREFIX(x) DISABLED_##x - -#if defined _WIN32 - - #define DETAIL_IGN_UTILS_TEST_DISABLED_ON_WIN32(TestName) \ - DETAIL_IGN_UTILS_ADD_DISABLED_PREFIX(TestName) - -#else - - #define DETAIL_IGN_UTILS_TEST_DISABLED_ON_WIN32(TestName) \ - TestName - -#endif - -#if defined __APPLE__ - - #define DETAIL_IGN_UTILS_TEST_DISABLED_ON_MAC(TestName) \ - DETAIL_IGN_UTILS_ADD_DISABLED_PREFIX(TestName) - -#else - - #define DETAIL_IGN_UTILS_TEST_DISABLED_ON_MAC(TestName) \ - TestName - -#endif - -#if defined __linux__ - - #define DETAIL_IGN_UTILS_TEST_ENABLED_ONLY_ON_LINUX(TestName) \ - TestName - -#else - - #define DETAIL_IGN_UTILS_TEST_ENABLED_ONLY_ON_LINUX(TestName) \ - DETAIL_IGN_UTILS_ADD_DISABLED_PREFIX(TestName) - -#endif - - -#endif diff --git a/ignition/utilities/detail/SuppressWarning.hh b/ignition/utilities/detail/SuppressWarning.hh deleted file mode 100644 index d30bb28..0000000 --- a/ignition/utilities/detail/SuppressWarning.hh +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Copyright (C) 2017 Open Source Robotics Foundation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - - -#ifndef IGNITION_UTILITIES_DETAIL_SUPPRESSWARNING_HH_ -#define IGNITION_UTILITIES_DETAIL_SUPPRESSWARNING_HH_ - -#include - -#define DETAIL_IGN_UTILS_STRINGIFY(x) #x - -/* cppcheck-suppress */ - -// BEGIN / FINISH Macros - -#if defined __clang__ - - #define DETAIL_IGN_UTILS_BEGIN_WARN_SUP_PUSH \ - _Pragma("clang diagnostic push") - - - #define DETAIL_IGN_UTILS_WARN_SUP_HELPER_2(w) \ - DETAIL_IGN_UTILS_STRINGIFY(clang diagnostic ignored w) - - - #define DETAIL_IGN_UTILS_WARN_SUP_HELPER(w) \ - _Pragma(DETAIL_IGN_UTILS_WARN_SUP_HELPER_2(w)) - - - #define DETAIL_IGN_UTILS_WARN_RESUME \ - _Pragma("clang diagnostic pop") - - -#elif defined __GNUC__ - - // NOTE: clang will define both __clang__ and __GNUC__, and it seems that - // clang will gladly accept GCC pragmas. Even so, if we want the pragmas to - // target the "correct" compiler, we should check if __clang__ is defined - // before checking whether __GNUC__ is defined. 
- - #define DETAIL_IGN_UTILS_BEGIN_WARN_SUP_PUSH \ - _Pragma("GCC diagnostic push") - - - #define DETAIL_IGN_UTILS_WARN_SUP_HELPER_2(w) \ - DETAIL_IGN_UTILS_STRINGIFY(GCC diagnostic ignored w) - - - #define DETAIL_IGN_UTILS_WARN_SUP_HELPER(w) \ - _Pragma(DETAIL_IGN_UTILS_WARN_SUP_HELPER_2(w)) - - - #define DETAIL_IGN_UTILS_WARN_RESUME \ - _Pragma("GCC diagnostic pop") - - -#elif defined _MSC_VER - - - #define DETAIL_IGN_UTILS_BEGIN_WARN_SUP_PUSH \ - __pragma(warning(push)) - - - #define DETAIL_IGN_UTILS_WARN_SUP_HELPER(w) \ - __pragma(warning(disable: w)) - - - #define DETAIL_IGN_UTILS_WARN_RESUME \ - __pragma(warning(pop)) - - -#else - - // Make these into no-ops if we don't know the type of compiler - - #define DETAIL_IGN_UTILS_BEGIN_WARN_SUP_PUSH - - - #define DETAIL_IGN_UTILS_WARN_SUP_HELPER(w) - - - #define DETAIL_IGN_UTILS_WARN_RESUME - - -#endif - - -#define DETAIL_IGN_UTILS_BEGIN_WARNING_SUPPRESSION(warning_token) \ - DETAIL_IGN_UTILS_BEGIN_WARN_SUP_PUSH \ - DETAIL_IGN_UTILS_WARN_SUP_HELPER(warning_token) - - - -// Warning Tokens -#if defined __GNUC__ || defined __clang__ - - #define DETAIL_IGN_UTILS_WARN_IGNORE__NON_VIRTUAL_DESTRUCTOR \ - DETAIL_IGN_UTILS_BEGIN_WARNING_SUPPRESSION("-Wdelete-non-virtual-dtor") - - #define DETAIL_IGN_UTILS_WARN_RESUME__NON_VIRTUAL_DESTRUCTOR \ - DETAIL_IGN_UTILS_WARN_RESUME - - - // There is no analogous warning for this in GCC or Clang so we just make - // blank macros for this warning type. 
- #define DETAIL_IGN_UTILS_WARN_IGNORE__DLL_INTERFACE_MISSING - #define DETAIL_IGN_UTILS_WARN_RESUME__DLL_INTERFACE_MISSING - - -#elif defined _MSC_VER - - #define DETAIL_IGN_UTILS_WARN_IGNORE__NON_VIRTUAL_DESTRUCTOR \ - DETAIL_IGN_UTILS_BEGIN_WARNING_SUPPRESSION(4265) - - #define DETAIL_IGN_UTILS_WARN_RESUME__NON_VIRTUAL_DESTRUCTOR \ - DETAIL_IGN_UTILS_WARN_RESUME - - - #define DETAIL_IGN_UTILS_WARN_IGNORE__DLL_INTERFACE_MISSING \ - DETAIL_IGN_UTILS_BEGIN_WARNING_SUPPRESSION(4251) - - #define DETAIL_IGN_UTILS_WARN_RESUME__DLL_INTERFACE_MISSING \ - DETAIL_IGN_UTILS_WARN_RESUME - - -#else - - // If the compiler is unknown, we simply leave these macros blank to avoid - // compilation errors. - - #define DETAIL_IGN_UTILS_WARN_IGNORE__NON_VIRTUAL_DESTRUCTOR - #define DETAIL_IGN_UTILS_WARN_RESUME__NON_VIRTUAL_DESTRUCTOR - - - #define DETAIL_IGN_UTILS_WARN_IGNORE__DLL_INTERFACE_MISSING - #define DETAIL_IGN_UTILS_WARN_RESUME__DLL_INTERFACE_MISSING - - -#endif - - -#endif diff --git a/pathutils.bzl b/pathutils.bzl deleted file mode 100644 index 51b54d7..0000000 --- a/pathutils.bzl +++ /dev/null @@ -1,184 +0,0 @@ -#============================================================================== -#BEGIN internal helpers - -# Copied from the Drake project: -# https://github.com/RobotLocomotion/drake/blob/17423f8fb6f292b4af0b4cf3c6c0f157273af501/tools/skylark/pathutils.bzl - -#------------------------------------------------------------------------------ -# Remove prefix from path. -def ___remove_prefix(path, prefix): - # If the prefix has more parts than the path, failure is certain. - if len(prefix) > len(path): - return None - - # Iterate over components to determine if a match exists. - for n in range(len(prefix)): - if prefix[n] == path[n]: - continue - elif prefix[n] == "*": - continue - else: - return None - - return "/".join(path[len(prefix):]) - -def __remove_prefix(path, prefix): - # Ignore trailing empty element (happens if prefix string ends with "/"). 
- if len(prefix[-1]) == 0: - prefix = prefix[:-1] - - # If the prefix has more parts than the path, failure is certain. (We also - # need at least one component of the path left over so the stripped path is - # not empty.) - if len(prefix) > (len(path) - 1): - return None - - # Iterate over components to determine if a match exists. - for n in range(len(prefix)): - # Same path components match. - if prefix[n] == path[n]: - continue - - # Single-glob matches any (one) path component. - if prefix[n] == "*": - continue - - # Mulit-glob matches one or more components. - if prefix[n] == "**": - # If multi-glob is at the end of the prefix, return the last path - # component. - if n + 1 == len(prefix): - return path[-1] - - # Otherwise, the most components the multi-glob can match is the - # remaining components (len(prefix) - n - 1; the 1 is the current - # prefix component) less one (since we need to keep at least one - # component of the path). - k = len(path) - (len(prefix) - n - 2) - - # Try to complete the match, iterating (backwards) over the number - # of components that the multi-glob might match. - for t in reversed(range(n, k)): - x = ___remove_prefix(path[t:], prefix[n + 1:]) - if x != None: - return x - - # Multi-glob failed to match. - return None - - # Components did not match. - return None - - return "/".join(path[len(prefix):]) - -def _remove_prefix(path, prefix): - """Remove prefix from path. - This attempts to remove the specified prefix from the specified path. The - prefix may contain the globs ``*`` or ``**``, which match one or many - path components, respectively. Matching is greedy. Globs may only be - matched against complete path components (e.g. ``a/*/`` is okay, but - ``a*/`` is not treated as a glob and will be matched literally). Due to - Skylark limitations, at most one ``**`` may be matched. - Args: - path (:obj:`str`) The path to modify. - prefix (:obj:`str`) The prefix to remove. 
- Returns: - :obj:`str`: The path with the prefix removed if successful, or None if - the prefix does not match the path. - """ - return __remove_prefix(path.split("/"), prefix.split("/")) - -#END internal helpers -#============================================================================== -#BEGIN macros - -#------------------------------------------------------------------------------ -def basename(path): - """Return the file name portion of a file path.""" - return path.split("/")[-1] - -#------------------------------------------------------------------------------ -def dirname(path): - """Return the directory portion of a file path.""" - if path == "/": - return "/" - - parts = path.split("/") - - if len(parts) > 1: - return "/".join(parts[:-1]) - - return "." - -#------------------------------------------------------------------------------ -def join_paths(*args): - """Join paths without duplicating separators. - This is roughly equivalent to Python's `os.path.join`. - Args: - *args (:obj:`list` of :obj:`str`): Path components to be joined. - Returns: - :obj:`str`: The concatenation of the input path components. - """ - result = "" - - for part in args: - if part.endswith("/"): - part = part[-1] - - if part == "" or part == ".": - continue - - result += part + "/" - - return result[:-1] - -#------------------------------------------------------------------------------ -def output_path(ctx, input_file, strip_prefix, package_root = None): - """Compute "output path". - This computes the adjusted output path for an input file. Specifically, it - a) determines the path relative to the invoking context (which is usually, - but not always, the same as the path as specified by the user when the file - was mentioned in a rule), without Bazel's various possible extras, and b) - optionally removes prefixes from this path. When removing prefixes, the - first matching prefix is removed. 
- This is used primarily to compute the output install path, without the - leading install prefix, for install actions. - For example:: - install_files( - dest = "docs", - files = ["foo/bar.txt"], - strip_prefix = ["foo/"], - ...) - The :obj:`File`'s path components will have various Bazel bits added. Our - first step is to recover the input path, ``foo/bar.txt``. Then we remove - the prefix ``foo``, giving a path of ``bar.txt``, which will become - ``docs/bar.txt`` when the install destination is added. - The input file must belong to the current package; otherwise, ``None`` is - returned. - Args: - input_file (:obj:`File`): Artifact to be installed. - strip_prefix (:obj:`list` of :obj:`str`): List of prefixes to strip - from the input path before prepending the destination. - Returns: - :obj:`str`: The install destination path for the file. - """ - - if package_root == None: - # Determine base path of invoking context. - package_root = join_paths(ctx.label.workspace_root, ctx.label.package) - - input_path = input_file.short_path - - # Deal with possible case of file outside the package root. - if input_path == None: - return None - - # Possibly remove prefixes. - for p in strip_prefix: - output_path = _remove_prefix(input_path, p) - if output_path != None: - return output_path - - return input_path - -#END macros diff --git a/qt.bzl b/qt.bzl deleted file mode 100644 index 0ec734c..0000000 --- a/qt.bzl +++ /dev/null @@ -1,102 +0,0 @@ -# -# Copyright 2020 Justin Buchanan -# Copyright 2016 Ben Breslauer -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Copied from -# https://github.com/justbuchanan/bazel_rules_qt - -load("@rules_cc//cc:defs.bzl", "cc_binary") - -def qt_moc(hdrs): - _moc_srcs = [] - for hdr in hdrs: - header_path = "%s" % (hdr.replace("//", "").replace(":", "/")) if len(native.package_name()) > 0 else hdr - moc_name = "%s_moc" % hdr.replace(".", "_").replace("//", "").replace("/", "_").replace(":", "_") - native.genrule( - name = moc_name, - srcs = [hdr], - outs = [moc_name + ".cc"], - cmd = "qtchooser -qt=5 -run-tool=moc $(location %s) -o $@ -f'%s'" % - (hdr, header_path), - ) - _moc_srcs.append(moc_name) - return _moc_srcs - -def qt_cc_binary(name, srcs, hdrs, linkopts, normal_hdrs = [], deps = None, **kwargs): - """Compiles a QT library and generates the MOC for it. - If a UI file is provided, then it is also compiled with UIC. - Args: - name: A name for the rule. - srcs: The cpp files to compile. - hdrs: The header files that the MOC compiles to src. - normal_hdrs: Headers which are not sources for generated code. - deps: cc_library dependencies for the library. - kwargs: Any additional arguments are passed to the cc_library rule. - """ - - _moc_srcs = [] - for hdr in hdrs: - header_path = "%s" % (hdr.replace("//", "").replace(":", "/")) if len(native.package_name()) > 0 else hdr - moc_name = "%s_moc" % hdr.replace(".", "_").replace("//", "").replace("/", "_").replace(":", "_") - native.genrule( - name = moc_name, - srcs = [hdr], - outs = [moc_name + ".cc"], - cmd = "qtchooser -qt=5 -run-tool=moc $(location %s) -o $@ -f'%s'" % - (hdr, header_path), - ) - _moc_srcs.append(":" + moc_name) - cc_binary( - name = name, - srcs = srcs + _moc_srcs + hdrs + normal_hdrs, - linkopts = linkopts, - deps = deps, - **kwargs - ) - -def qt_cc_library(name, srcs, hdrs, linkopts, normal_hdrs = [], deps = None, **kwargs): - """Compiles a QT library and generates the MOC for it. 
- If a UI file is provided, then it is also compiled with UIC. - Args: - name: A name for the rule. - srcs: The cpp files to compile. - hdrs: The header files that the MOC compiles to src. - normal_hdrs: Headers which are not sources for generated code. - deps: cc_library dependencies for the library. - kwargs: Any additional arguments are passed to the cc_library rule. - """ - - _moc_srcs = [] - for hdr in hdrs: - header_path = "%s" % (hdr.replace("//", "").replace(":", "/")) if len(native.package_name()) > 0 else hdr - moc_name = "%s_moc" % hdr.replace(".", "_").replace("//", "").replace("/", "_").replace(":", "_") - header_path = "/".join(header_path.split("/")[1:]) - - native.genrule( - name = moc_name, - srcs = [hdr], - outs = [moc_name + ".cc"], - cmd = "qtchooser -qt=5 -run-tool=moc $(location %s) -o $@ -f'%s'" % - (hdr, header_path), - ) - _moc_srcs.append(":" + moc_name) - native.cc_library( - name = name, - srcs = srcs + _moc_srcs + hdrs + normal_hdrs, - linkopts = linkopts, - deps = deps, - **kwargs - ) diff --git a/repositories.bzl b/repositories.bzl deleted file mode 100644 index cc14ab1..0000000 --- a/repositories.bzl +++ /dev/null @@ -1,261 +0,0 @@ -load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") - -def assimp(): - native.new_local_repository( - name = "assimp", - path = "/usr/include", - build_file = "//ign_bazel/third_party:assimp.BUILD", - ) - -def boost(): - native.new_local_repository( - name = "boost", - path = "/usr/include/boost", - build_file = "//ign_bazel/third_party:boost.BUILD", - ) - -def curl(): - native.new_local_repository( - name = "curl", - path = "/usr/include/x86_64-linux-gnu", - build_file = "//ign_bazel/third_party:curl.BUILD", - ) - -def fcl(): - native.new_local_repository( - name = "fcl", - path = "/usr/include", - build_file = "//ign_bazel/third_party:fcl.BUILD", - ) - -def ffmpeg(): - native.new_local_repository( - name = "ffmpeg", - path = "/usr/include/x86_64-linux-gnu", - build_file = 
"//ign_bazel/third_party:ffmpeg.BUILD", - ) - -def freetype(): - native.new_local_repository( - name = "freetype2", - path = "/usr/include/freetype2", - build_file = "//ign_bazel/third_party:freetype2.BUILD", - ) - -def freeimage(): - native.new_local_repository( - name = "freeimage", - path = "/usr/include", - build_file = "//ign_bazel/third_party:freeimage.BUILD", - ) - -def gl(): - native.new_local_repository( - name = "gl", - path = "/usr/include", - build_file = "//ign_bazel/third_party:gl.BUILD", - ) - -def glib(): - native.new_local_repository( - name = "glib", - path = "/usr", - build_file = "//ign_bazel/third_party:glib.BUILD", - ) - -def gts(): - native.new_local_repository( - name = "gts", - path = "/usr", - build_file = "//ign_bazel/third_party:gts.BUILD", - ) - -def json(): - native.new_local_repository( - name = "json", - path = "/usr/include/jsoncpp/", - build_file = "//ign_bazel/third_party:json.BUILD", - ) - -def nlopt(): - native.new_local_repository( - name = "nlopt", - path = "/usr/include", - build_file = "//ign_bazel/third_party:nlopt.BUILD", - ) - -def ode(): - native.new_local_repository( - name = "ode", - path = "/usr/include", - build_file = "//ign_bazel/third_party:ode.BUILD", - ) - -def osg(): - native.new_local_repository( - name = "osg", - path = "/usr/include", - build_file = "//ign_bazel/third_party:osg.BUILD", - ) - -def sqlite3(): - native.new_local_repository( - name = "sqlite3", - path = "/usr/include", - build_file = "//ign_bazel/third_party:sqlite3.BUILD", - ) - -def tinyxml2(): - _maybe( - http_archive, - name = "tinyxml2", - build_file = "//ign_bazel/third_party:tinyxml2.BUILD", - sha256 = "6ce574fbb46751842d23089485ae73d3db12c1b6639cda7721bf3a7ee862012c", - strip_prefix = "tinyxml2-8.0.0", - urls = [ - "https://github.com/leethomason/tinyxml2/archive/8.0.0.tar.gz", - ], - ) - -def uuid(): - native.new_local_repository( - name = "uuid", - path = "/usr/include", - build_file = "//ign_bazel/third_party:uuid.BUILD", - ) - -def 
X(): - native.new_local_repository( - name = "X", - path = "/usr/include", - build_file = "//ign_bazel/third_party:X.BUILD", - ) - -def yaml(): - native.new_local_repository( - name = "yaml", - path = "/usr/include", - build_file = "//ign_bazel/third_party:yaml.BUILD", - ) - -def libzip(): - native.new_local_repository( - name = "zip", - path = "/usr/include", - build_file = "//ign_bazel/third_party:zip.BUILD", - ) - -def zmq(): - native.new_local_repository( - name = "zmq", - path = "/usr/include", - build_file = "//ign_bazel/third_party:zmq.BUILD", - ) - -def eigen3(): - _maybe( - http_archive, - name = "eigen3", - build_file = "//ign_bazel/third_party:eigen3.BUILD", - sha256 = "ca7beac153d4059c02c8fc59816c82d54ea47fe58365e8aded4082ded0b820c4", - strip_prefix = "eigen-eigen-f3a22f35b044", - urls = [ - "http://mirror.bazel.build/bitbucket.org/eigen/eigen/get/f3a22f35b044.tar.gz", - "https://bitbucket.org/eigen/eigen/get/f3a22f35b044.tar.gz", - ], - ) - -def dart_repositories(): - assimp() - boost() - fcl() - gl() - ode() - osg() - nlopt() - -def ogre_repositories(): - freetype() - -def ign_bazel_repositories(): - _maybe( - http_archive, - name = "rules_python", - urls = ["https://github.com/bazelbuild/rules_python/releases/download/0.1.0/rules_python-0.1.0.tar.gz"], - sha256 = "b6d46438523a3ec0f3cead544190ee13223a52f6a6765a29eae7b7cc24cc83a0", - ) - - _maybe( - http_archive, - name = "gtest", - sha256 = "9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb", - strip_prefix = "googletest-release-1.10.0", - urls = ["https://github.com/google/googletest/archive/release-1.10.0.tar.gz"], - ) - -def ign_math_repositories(): - eigen3() - -def ign_common_repositories(): - ffmpeg() - freeimage() - glib() - gts() - uuid() - -def ign_msgs_repositories(): - _maybe( - http_archive, - name = "rules_proto", - sha256 = "9fc210a34f0f9e7cc31598d109b5d069ef44911a82f507d5a88716db171615a8", - strip_prefix = "rules_proto-f7a30f6f80006b591fa7c437fe5a951eb10bcbcf", - urls = [ 
- "https://github.com/bazelbuild/rules_proto/archive/f7a30f6f80006b591fa7c437fe5a951eb10bcbcf.tar.gz" - ], - ) - - tinyxml2() - -def ign_physics_repositories(): - eigen3() - -def ign_transport_repositories(): - sqlite3() - zmq() - -def ign_fuel_tools_repositories(): - curl() - json() - yaml() - libzip() - -def ign_rendering_repositories(): - native.new_local_repository( - name = "X", - build_file = "//ign_bazel/third_party:X.BUILD", - path = "/usr/include/", - ) - -def ign_gui_repositories(): - native.new_local_repository( - name = "qt", - build_file = "//ign_bazel/third_party:qt.BUILD", - path = "/usr/include/x86_64-linux-gnu/qt5/", - ) - -def ignition_repositories(): - dart_repositories() - ogre_repositories() - ign_bazel_repositories() - ign_math_repositories() - ign_common_repositories() - ign_msgs_repositories() - ign_rendering_repositories() - ign_transport_repositories() - ign_physics_repositories() - ign_fuel_tools_repositories() - ign_gui_repositories() - -def _maybe(repo_rule, name, **kwargs): - if name not in native.existing_rules(): - repo_rule(name = name, **kwargs) diff --git a/third_party/BUILD.bazel b/third_party/BUILD.bazel deleted file mode 100644 index e69de29..0000000 diff --git a/third_party/X.BUILD b/third_party/X.BUILD deleted file mode 100644 index 4c3723d..0000000 --- a/third_party/X.BUILD +++ /dev/null @@ -1,67 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "X11", - linkopts = [ - "-lX11", - ], -) - -cc_library( - name = "SM", - hdrs = glob(["X11/SM/*.h"]), - includes = ["."], - linkopts = [ - "-lSM", - ], -) - -cc_library( - name = "ICE", - hdrs = glob(["X11/ICE/*.h"]), - includes = ["."], - linkopts = [ - "-lICE", - ], -) - -cc_library( - name = "Xaw", - hdrs = glob(["X11/Xaw/*.h"]), - includes = ["."], - linkopts = [ - "-lXaw7", - ], - deps = [ - ":Xt", - ":X11", - ] -) - -cc_library( - name = "Xext", - hdrs = glob(["X11/extensions/*.h"]), - includes = ["."], - linkopts = [ - "-lXext", - ], -) - 
-cc_library( - name = "Xt", - hdrs = glob(["X11/*.h"]), - includes = ["."], - linkopts = [ - "-lXt", - ], -) - -cc_library( - name = "Xrandr", - hdrs = ["X11/extensions/Xrandr.h"], - includes = ["."], - linkopts = [ - "-lXrandr", - ], -) - diff --git a/third_party/X.build b/third_party/X.build deleted file mode 100644 index cb0c563..0000000 --- a/third_party/X.build +++ /dev/null @@ -1,66 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "X11", - linkopts = [ - "-lX11", - ], -) - -cc_library( - name = "SM", - hdrs = glob(["X11/SM/*.h"]), - includes = ["."], - linkopts = [ - "-lSM", - ], -) - -cc_library( - name = "ICE", - hdrs = glob(["X11/ICE/*.h"]), - includes = ["."], - linkopts = [ - "-lICE", - ], -) - -cc_library( - name = "Xaw", - hdrs = glob(["X11/Xaw/*.h"]), - includes = ["."], - linkopts = [ - "-lXaw7", - ], - deps = [ - ":Xt", - ":X11", - ] -) - -cc_library( - name = "Xext", - hdrs = glob(["X11/extensions/*.h"]), - includes = ["."], - linkopts = [ - "-lXext", - ], -) - -cc_library( - name = "Xt", - hdrs = glob(["X11/*.h"]), - includes = ["."], - linkopts = [ - "-lXt", - ], -) - -cc_library( - name = "Xrandr", - hdrs = ["X11/extensions/Xrandr.h"], - includes = ["."], - linkopts = [ - "-lXrandr", - ], -) diff --git a/third_party/assimp.BUILD b/third_party/assimp.BUILD deleted file mode 100644 index ad6acc2..0000000 --- a/third_party/assimp.BUILD +++ /dev/null @@ -1,10 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "assimp", - hdrs = glob(["assimp/*.h", "assimp/*.hpp", "assimp/*.inl", "assimp/Compiler/*.h"]), - includes = [".", "assimp"], - linkopts = [ - "-lassimp", - ], -) diff --git a/third_party/boost.BUILD b/third_party/boost.BUILD deleted file mode 100644 index 408b4a8..0000000 --- a/third_party/boost.BUILD +++ /dev/null @@ -1,34 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "system", - hdrs = glob(["system/*.hpp", "system/**/*.hpp"]), 
- includes = ["."], - linkopts = [ - "-lboost_system", - ], -) - -cc_library( - name = "filesystem", - hdrs = ["filesystem.hpp"] + - glob(["filesystem/*.hpp", "filesystem/**/*.hpp"]), - includes = ["."], - linkopts = [ - "-lboost_filesystem", - ], - deps = [ - ":system", - ], -) - -cc_library( - name = "regex", - hdrs = ["regex.hpp"] + - glob(["regex/*.hpp", "regex/**/*.hpp"]), - includes = ["."], - linkopts = [ - "-lboost_regex", - ], -) - diff --git a/third_party/curl.BUILD b/third_party/curl.BUILD deleted file mode 100644 index 66de94d..0000000 --- a/third_party/curl.BUILD +++ /dev/null @@ -1,9 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "curl", - hdrs = glob(["curl/*.h"]), - linkopts = [ - "-lcurl", - ], -) diff --git a/third_party/eigen3.BUILD b/third_party/eigen3.BUILD deleted file mode 100644 index 4d5db75..0000000 --- a/third_party/eigen3.BUILD +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2018 The Cartographer Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Description: -# Eigen is a C++ template library for linear algebra: vectors, -# matrices, and related algorithms. - -licenses([ - # Note: Eigen is an MPL2 library that includes GPL v3 and LGPL v2.1+ code. - # We've taken special care to not reference any restricted code. 
- "reciprocal", # MPL2 - "notice", # Portions BSD -]) - -exports_files(["COPYING.MPL2"]) - -EIGEN_FILES = [ - "Eigen/**", - "unsupported/Eigen/CXX11/**", - "unsupported/Eigen/FFT", - "unsupported/Eigen/KroneckerProduct", - "unsupported/Eigen/src/FFT/**", - "unsupported/Eigen/src/KroneckerProduct/**", - "unsupported/Eigen/MatrixFunctions", - "unsupported/Eigen/SpecialFunctions", - "unsupported/Eigen/src/MatrixFunctions/**", - "unsupported/Eigen/src/SpecialFunctions/**", -] - -# List of files picked up by glob but actually part of another target. -EIGEN_EXCLUDE_FILES = [ - "Eigen/src/Core/arch/AVX/PacketMathGoogleTest.cc", -] - -EIGEN_MPL2_HEADER_FILES = glob( - EIGEN_FILES, - exclude = EIGEN_EXCLUDE_FILES, -) - -cc_library( - name = "eigen3", - hdrs = EIGEN_MPL2_HEADER_FILES, - includes = ["."], - visibility = ["//visibility:public"], -) diff --git a/third_party/fcl.BUILD b/third_party/fcl.BUILD deleted file mode 100644 index 827fefa..0000000 --- a/third_party/fcl.BUILD +++ /dev/null @@ -1,10 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "fcl", - hdrs = glob(["fcl/*.h", "fcl/**/*.h"]), - linkopts = [ - "-lfcl", - ], -) - diff --git a/third_party/ffmpeg.BUILD b/third_party/ffmpeg.BUILD deleted file mode 100644 index 6bda4f8..0000000 --- a/third_party/ffmpeg.BUILD +++ /dev/null @@ -1,179 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "avcodec", - hdrs = [ - "libavcodec/avcodec.h", - "libavcodec/avdct.h", - "libavcodec/avfft.h", - "libavcodec/d3d11va.h", - "libavcodec/dirac.h", - "libavcodec/dv_profile.h", - "libavcodec/dxva2.h", - "libavcodec/jni.h", - "libavcodec/mediacodec.h", - "libavcodec/qsv.h", - "libavcodec/vaapi.h", - "libavcodec/vdpau.h", - "libavcodec/version.h", - "libavcodec/videotoolbox.h", - "libavcodec/vorbis_parser.h", - "libavcodec/xvmc.h", - ], - includes = ["."], - linkopts = [ - "-lavcodec", - ], -) - -cc_library( - name = "avdevice", - hdrs = [ - 
"libavdevice/avdevice.h", - "libavdevice/version.h" - ], - includes = ["."], - linkopts = [ - "-lavdevice", - ], -) - -cc_library( - name = "avfilter", - hdrs = [ - "libavfilter/avfilter.h", - "libavfilter/buffersink.h", - "libavfilter/buffersrc.h", - "libavfilter/version.h" - ], - includes = ["."], - linkopts = [ - "-lavfilter", - ], -) - -cc_library( - name = "avformat", - hdrs = [ - "libavformat/avformat.h", - "libavformat/avio.h", - "libavformat/version.h", - ], - includes = ["."], - linkopts = [ - "-lavformat", - ], -) - -cc_library( - name = "avutil", - hdrs = [ - "libavutil/adler32.h", - "libavutil/aes.h", - "libavutil/aes_ctr.h", - "libavutil/attributes.h", - "libavutil/audio_fifo.h", - "libavutil/avassert.h", - "libavutil/avconfig.h", - "libavutil/avstring.h", - "libavutil/avutil.h", - "libavutil/base64.h", - "libavutil/blowfish.h", - "libavutil/bprint.h", - "libavutil/bswap.h", - "libavutil/buffer.h", - "libavutil/camellia.h", - "libavutil/cast5.h", - "libavutil/channel_layout.h", - "libavutil/common.h", - "libavutil/cpu.h", - "libavutil/crc.h", - "libavutil/des.h", - "libavutil/dict.h", - "libavutil/display.h", - "libavutil/downmix_info.h", - "libavutil/error.h", - "libavutil/eval.h", - "libavutil/ffversion.h", - "libavutil/fifo.h", - "libavutil/file.h", - "libavutil/frame.h", - "libavutil/hash.h", - "libavutil/hmac.h", - "libavutil/hwcontext.h", - "libavutil/hwcontext_cuda.h", - "libavutil/hwcontext_d3d11va.h", - "libavutil/hwcontext_drm.h", - "libavutil/hwcontext_dxva2.h", - "libavutil/hwcontext_qsv.h", - "libavutil/hwcontext_vaapi.h", - "libavutil/hwcontext_vdpau.h", - "libavutil/hwcontext_videotoolbox.h", - "libavutil/imgutils.h", - "libavutil/intfloat.h", - "libavutil/intreadwrite.h", - "libavutil/lfg.h", - "libavutil/log.h", - "libavutil/lzo.h", - "libavutil/macros.h", - "libavutil/mastering_display_metadata.h", - "libavutil/mathematics.h", - "libavutil/md5.h", - "libavutil/mem.h", - "libavutil/motion_vector.h", - "libavutil/murmur3.h", - 
"libavutil/opt.h", - "libavutil/parseutils.h", - "libavutil/pixdesc.h", - "libavutil/pixelutils.h", - "libavutil/pixfmt.h", - "libavutil/random_seed.h", - "libavutil/rational.h", - "libavutil/rc4.h", - "libavutil/replaygain.h", - "libavutil/ripemd.h", - "libavutil/samplefmt.h", - "libavutil/sha.h", - "libavutil/sha512.h", - "libavutil/spherical.h", - "libavutil/stereo3d.h", - "libavutil/tea.h", - "libavutil/threadmessage.h", - "libavutil/time.h", - "libavutil/timecode.h", - "libavutil/timestamp.h", - "libavutil/tree.h", - "libavutil/twofish.h", - "libavutil/version.h", - "libavutil/xtea.h", - ], - includes = ["."], - linkopts = [ - "-lavutil", - ], -) - -cc_library( - name = "swresample", - hdrs = [ - "libswresample/swresample.h", - "libswresample/version.h" - ], - includes = ["."], - linkopts = [ - "-lswresample", - ], -) - -cc_library( - name = "swscale", - hdrs = [ - "libswscale/swscale.h", - "libswscale/version.h" - ], - includes = ["."], - linkopts = [ - "-lswscale", - ], -) - diff --git a/third_party/freeimage.BUILD b/third_party/freeimage.BUILD deleted file mode 100644 index 0483394..0000000 --- a/third_party/freeimage.BUILD +++ /dev/null @@ -1,10 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "freeimage", - hdrs = ["FreeImage.h"], - linkopts = [ - "-lfreeimage", - ], -) - diff --git a/third_party/freetype2.BUILD b/third_party/freetype2.BUILD deleted file mode 100644 index f773063..0000000 --- a/third_party/freetype2.BUILD +++ /dev/null @@ -1,8 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "freetype2", - hdrs = glob(["ft2build.h", "freetype/*.h", "freetype/config/*.h"]), - includes = ["."] -) - diff --git a/third_party/gl.BUILD b/third_party/gl.BUILD deleted file mode 100644 index 44732fc..0000000 --- a/third_party/gl.BUILD +++ /dev/null @@ -1,17 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "GL", - linkopts = ["-lGL"], -) - -cc_library( 
- name = "GLU", - linkopts = ["-lGLU"], -) - -cc_library( - name = "glut", - linkopts = ["-lglut"], -) - diff --git a/third_party/glib.BUILD b/third_party/glib.BUILD deleted file mode 100644 index afe2e97..0000000 --- a/third_party/glib.BUILD +++ /dev/null @@ -1,17 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "glib", - hdrs = glob(["include/glib-2.0/**"]) + - ["lib/x86_64-linux-gnu/glib-2.0/include/glibconfig.h"], - includes = [ - "include/glib-2.0/", - "lib/x86_64-linux-gnu/glib-2.0/include/" - ], - linkopts = [ - "-lglib-2.0", - "-lgobject-2.0", - "-lgio-2.0", - "-lgthread-2.0", - ], -) diff --git a/third_party/gts.BUILD b/third_party/gts.BUILD deleted file mode 100644 index e32e04a..0000000 --- a/third_party/gts.BUILD +++ /dev/null @@ -1,15 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "gts", - hdrs = glob(["include/glib-2.0/**"]), - includes = ["include", - "lib/x86_64-linux-gnu/glib-2.0/include"], - linkopts = [ - "-lgts", - "-lm" - ], - deps = [ - "@glib" - ] -) diff --git a/third_party/json.BUILD b/third_party/json.BUILD deleted file mode 100644 index a03125b..0000000 --- a/third_party/json.BUILD +++ /dev/null @@ -1,11 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "json", - includes = ["."], - hdrs = glob(["json/*.h"]), - linkopts = [ - "-ljsoncpp", - ], -) - diff --git a/third_party/nlopt.BUILD b/third_party/nlopt.BUILD deleted file mode 100644 index c521cc8..0000000 --- a/third_party/nlopt.BUILD +++ /dev/null @@ -1,11 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "nlopt", - hdrs = ["nlopt.h", "nlopt.hpp"], - includes = ["."], - linkopts = [ - "-lnlopt", - ], -) - diff --git a/third_party/ode.BUILD b/third_party/ode.BUILD deleted file mode 100644 index f7faab7..0000000 --- a/third_party/ode.BUILD +++ /dev/null @@ -1,11 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( 
- name = "ode", - hdrs = glob(["ode/*.h"]), - includes = [".", "ode"], - linkopts = [ - "-lode", - ], -) - diff --git a/third_party/osg.BUILD b/third_party/osg.BUILD deleted file mode 100644 index 901927b..0000000 --- a/third_party/osg.BUILD +++ /dev/null @@ -1,31 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "core", - hdrs = glob(["osg/*"]), - includes = ["."], - linkopts = [ - "-losg" - ], -) - -modules = ["Animation", "DB", "FX", "GA", "Manipulator", "Particle", "Presentation", "Shadow", "Sim", "Terrain", "Text", "UI", "Util", "Viewer", "Widget"] - - -[ - cc_library( - name = "osg" + module, - hdrs = glob(["osg" + module + "*"]), - includes = ["."], - linkopts = [ - "-losg" + module, - ], - deps = [ ":core" ], - ) - for module in modules -] - -cc_library( - name = "osg", - deps = [":osg" + module for module in modules] -) diff --git a/third_party/qt.BUILD b/third_party/qt.BUILD deleted file mode 100644 index 1ba270b..0000000 --- a/third_party/qt.BUILD +++ /dev/null @@ -1,86 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "qt_core", - hdrs = glob(["QtCore/**"]), - includes = [".", "QtCore"], - linkopts = [ - "-lQt5Core", - ], -) - -cc_library( - name = "qt_network", - hdrs = glob(["QtNetwork/**"]), - includes = [".", "QtNetwork"], - linkopts = [ - "-lQt5Network", - ], -) - -cc_library( - name = "qt_widgets", - hdrs = glob(["QtWidgets/**"]), - includes = [".", "QtWidgets"], - linkopts = [ - "-lQt5Widgets", - ], - deps = [":qt_core", ":qt_gui"], -) - -cc_library( - name = "qt_quick_control", - hdrs = glob(["QtQuickControls2/**"]), - includes = [".", "QtQuickControls2"], - linkopts = [ - "-lQt5Quick", - ], - deps = [ - ":qt_gui", - ":qt_qml", - ":qt_quick", - ], -) - -cc_library( - name = "qt_quick", - hdrs = glob(["QtQuick/**"]), - includes = [".", "QtQuick"], - linkopts = [ - "-lQt5Quick", - ], - deps = [ - ":qt_gui", - ":qt_qml", - ], -) - -cc_library( - name = "qt_qml", - hdrs = 
glob(["QtQml/**"]), - includes = [".", "QtQml"], - linkopts = [ - "-lQt5Qml", - ], - deps = [ - ":qt_core", - ":qt_network", - ], -) - -cc_library( - name = "qt_gui", - hdrs = glob(["QtGui/**"]), - includes = [".", "QtGui"], - linkopts = [ - "-lQt5Gui", - ], - deps = [":qt_core"], -) - -cc_library( - name = "qt_opengl", - hdrs = glob(["QtOpenGL/**"]), - includes = [".", "QtOpenGL"], - linkopts = ["-lQt5OpenGL"], -) diff --git a/third_party/sqlite3.BUILD b/third_party/sqlite3.BUILD deleted file mode 100644 index d080573..0000000 --- a/third_party/sqlite3.BUILD +++ /dev/null @@ -1,12 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "sqlite3", - hdrs = ["sqlite3.h", "sqlite3ext.h"], - includes = ["."], - linkopts = [ - "-lsqlite3", - ], -) - - diff --git a/third_party/tinyxml2.BUILD b/third_party/tinyxml2.BUILD deleted file mode 100644 index ca59265..0000000 --- a/third_party/tinyxml2.BUILD +++ /dev/null @@ -1,9 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "tinyxml2", - hdrs = ["tinyxml2.h"], - srcs = ["tinyxml2.cpp"], - includes = ["."], -) - diff --git a/third_party/uuid.BUILD b/third_party/uuid.BUILD deleted file mode 100644 index be56071..0000000 --- a/third_party/uuid.BUILD +++ /dev/null @@ -1,12 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "uuid", - hdrs = ["uuid/uuid.h"], - includes = ["."], - linkopts = [ - "-luuid", - ], -) - - diff --git a/third_party/yaml.BUILD b/third_party/yaml.BUILD deleted file mode 100644 index aafa4b1..0000000 --- a/third_party/yaml.BUILD +++ /dev/null @@ -1,9 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "yaml", - hdrs = glob(["yaml.h"]), - linkopts = [ - "-lyaml", - ], -) diff --git a/third_party/zip.BUILD b/third_party/zip.BUILD deleted file mode 100644 index 8dfb30d..0000000 --- a/third_party/zip.BUILD +++ /dev/null @@ -1,9 +0,0 @@ -package(default_visibility = 
["//visibility:public"]) - -cc_library( - name = "zip", - hdrs = glob(["zip.h"]), - linkopts = [ - "-lzip", - ], -) diff --git a/third_party/zmq.BUILD b/third_party/zmq.BUILD deleted file mode 100644 index c10c0d3..0000000 --- a/third_party/zmq.BUILD +++ /dev/null @@ -1,12 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -cc_library( - name = "zmq", - hdrs = ["zmq.h", "zmq_utils.h"], - includes = ["."], - linkopts = [ - "-lzmq", - ], -) - - From 6b0f7bf077f6f9d5b7eb07176d561ad50af726ae Mon Sep 17 00:00:00 2001 From: Michael Carroll Date: Wed, 8 Feb 2023 17:26:14 -0600 Subject: [PATCH 2/7] Include initial package information for garden Signed-off-by: Michael Carroll --- .github/actions/bazel-ci-jammy/Dockerfile | 6 + .github/actions/bazel-ci-jammy/action.yml | 16 + .github/actions/bazel-ci-jammy/entrypoint.sh | 3 + .github/actions/bazel-ci-jammy/run.sh | 64 +++ .github/ci/packages-jammy.apt | 1 + .github/workflows/ci.yml | 19 + BUILD.bazel | 6 + bazel.rc | 24 ++ cc_toolchain/BUILD.bazel | 21 + cc_toolchain/bazel.rc | 6 + example/BUILD.bazel.example | 9 + example/WORKSPACE.example | 5 + example/bazel.repos | 1 + example/bazelproject.example | 20 + example/bazelrc.example | 1 + lint/BUILD.bazel | 45 +++ lint/bazel_lint.bzl | 85 ++++ lint/buildifier-tables.json | 97 +++++ lint/buildifier.py | 149 +++++++ lint/bzlcodestyle.py | 101 +++++ lint/find_data.py | 24 ++ lint/lint.bzl | 12 + lint/util.py | 75 ++++ py_toolchain/BUILD.bazel | 94 +++++ py_toolchain/interpreter_paths.bzl | 21 + skylark/BUILD.bazel | 21 + skylark/build_defs.bzl | 34 ++ skylark/cmake_configure_file.bzl | 86 ++++ skylark/cmake_configure_file.py | 155 ++++++++ skylark/gz_configure_file.bzl | 83 ++++ skylark/gz_configure_file.py | 192 +++++++++ skylark/gz_export_header.bzl | 68 ++++ skylark/gz_include_header.bzl | 38 ++ skylark/gz_py.bzl | 260 +++++++++++++ skylark/pathutils.bzl | 203 ++++++++++ skylark/protobuf.bzl | 235 +++++++++++ skylark/py.bzl | 14 + skylark/python_env.bzl | 22 ++ 
ubuntu-jammy.bazelrc | 2 + workspace/BUILD.bazel | 3 + workspace/bazel_skylib/BUILD.bazel | 3 + workspace/bazel_skylib/repository.bzl | 12 + workspace/buildifier/BUILD.bazel | 3 + workspace/buildifier/package.BUILD.bazel | 7 + workspace/buildifier/repository.bzl | 69 ++++ workspace/default.bzl | 63 +++ workspace/execute.bzl | 77 ++++ workspace/generate_file.bzl | 25 ++ workspace/github.bzl | 363 +++++++++++++++++ workspace/mirrors.bzl | 30 ++ workspace/os.bzl | 344 ++++++++++++++++ workspace/pkg_config.BUILD.tpl | 21 + workspace/pkg_config.bzl | 388 +++++++++++++++++++ workspace/pycodestyle/BUILD.bazel | 3 + workspace/pycodestyle/package.BUILD.bazel | 40 ++ workspace/pycodestyle/repository.bzl | 15 + workspace/rules_python/BUILD.bazel | 3 + workspace/rules_python/repository.bzl | 21 + workspace/workspace-status.bash | 25 ++ 59 files changed, 3833 insertions(+) create mode 100644 .github/actions/bazel-ci-jammy/Dockerfile create mode 100644 .github/actions/bazel-ci-jammy/action.yml create mode 100755 .github/actions/bazel-ci-jammy/entrypoint.sh create mode 100755 .github/actions/bazel-ci-jammy/run.sh create mode 100644 .github/ci/packages-jammy.apt create mode 100644 .github/workflows/ci.yml create mode 100644 BUILD.bazel create mode 100644 bazel.rc create mode 100644 cc_toolchain/BUILD.bazel create mode 100644 cc_toolchain/bazel.rc create mode 100644 example/BUILD.bazel.example create mode 100644 example/WORKSPACE.example create mode 100644 example/bazel.repos create mode 100644 example/bazelproject.example create mode 100644 example/bazelrc.example create mode 100644 lint/BUILD.bazel create mode 100644 lint/bazel_lint.bzl create mode 100644 lint/buildifier-tables.json create mode 100644 lint/buildifier.py create mode 100644 lint/bzlcodestyle.py create mode 100644 lint/find_data.py create mode 100644 lint/lint.bzl create mode 100644 lint/util.py create mode 100644 py_toolchain/BUILD.bazel create mode 100644 py_toolchain/interpreter_paths.bzl create mode 100644 
skylark/BUILD.bazel create mode 100644 skylark/build_defs.bzl create mode 100644 skylark/cmake_configure_file.bzl create mode 100644 skylark/cmake_configure_file.py create mode 100644 skylark/gz_configure_file.bzl create mode 100644 skylark/gz_configure_file.py create mode 100644 skylark/gz_export_header.bzl create mode 100644 skylark/gz_include_header.bzl create mode 100644 skylark/gz_py.bzl create mode 100644 skylark/pathutils.bzl create mode 100644 skylark/protobuf.bzl create mode 100644 skylark/py.bzl create mode 100644 skylark/python_env.bzl create mode 100644 ubuntu-jammy.bazelrc create mode 100644 workspace/BUILD.bazel create mode 100644 workspace/bazel_skylib/BUILD.bazel create mode 100644 workspace/bazel_skylib/repository.bzl create mode 100644 workspace/buildifier/BUILD.bazel create mode 100644 workspace/buildifier/package.BUILD.bazel create mode 100644 workspace/buildifier/repository.bzl create mode 100644 workspace/default.bzl create mode 100644 workspace/execute.bzl create mode 100644 workspace/generate_file.bzl create mode 100644 workspace/github.bzl create mode 100644 workspace/mirrors.bzl create mode 100644 workspace/os.bzl create mode 100644 workspace/pkg_config.BUILD.tpl create mode 100644 workspace/pkg_config.bzl create mode 100644 workspace/pycodestyle/BUILD.bazel create mode 100644 workspace/pycodestyle/package.BUILD.bazel create mode 100644 workspace/pycodestyle/repository.bzl create mode 100644 workspace/rules_python/BUILD.bazel create mode 100644 workspace/rules_python/repository.bzl create mode 100755 workspace/workspace-status.bash diff --git a/.github/actions/bazel-ci-jammy/Dockerfile b/.github/actions/bazel-ci-jammy/Dockerfile new file mode 100644 index 0000000..25ba350 --- /dev/null +++ b/.github/actions/bazel-ci-jammy/Dockerfile @@ -0,0 +1,6 @@ +FROM ghcr.io/gazebo-tooling/gz-ubuntu:garden-jammy + +COPY ["run.sh", "/run.sh"] +COPY ["entrypoint.sh", "/entrypoint.sh"] + +ENTRYPOINT ["/entrypoint.sh"] diff --git 
a/.github/actions/bazel-ci-jammy/action.yml b/.github/actions/bazel-ci-jammy/action.yml new file mode 100644 index 0000000..3063595 --- /dev/null +++ b/.github/actions/bazel-ci-jammy/action.yml @@ -0,0 +1,16 @@ +name: 'Gazebo install using bazel' +description: '' +author: "Michael Carroll" +inputs: + github_token: + description: "Token for the repo. Can be passed in using {{ secrets.GITHUB_TOKEN }}" + required: true + bazel-args: + description: 'Additional Bazel arguments to use when building package under test' + required: true + default: '//...' +runs: + using: 'docker' + image: 'Dockerfile' + args: + - ${{ inputs.bazel-args }} diff --git a/.github/actions/bazel-ci-jammy/entrypoint.sh b/.github/actions/bazel-ci-jammy/entrypoint.sh new file mode 100755 index 0000000..d43cecd --- /dev/null +++ b/.github/actions/bazel-ci-jammy/entrypoint.sh @@ -0,0 +1,3 @@ +#!/bin/sh -l + +sudo bash /run.sh $@ diff --git a/.github/actions/bazel-ci-jammy/run.sh b/.github/actions/bazel-ci-jammy/run.sh new file mode 100755 index 0000000..57f12ae --- /dev/null +++ b/.github/actions/bazel-ci-jammy/run.sh @@ -0,0 +1,64 @@ +#!/bin/sh -l + +set -x +set -e + +BAZEL_ARGS=$1 + +echo ::group::Install tools: apt +apt update 2>&1 +apt -y install \ + build-essential \ + cppcheck \ + curl \ + git \ + gnupg \ + lsb-release \ + python3-pip \ + wget + +cd "$GITHUB_WORKSPACE" +SYSTEM_VERSION=`lsb_release -cs` +SOURCE_DEPENDENCIES="`pwd`/.github/ci/dependencies.yaml" +SOURCE_DEPENDENCIES_VERSIONED="`pwd`/.github/ci-$SYSTEM_VERSION/dependencies.yaml" +echo ::endgroup:: + +echo ::group::Install tools: pip +pip3 install -U pip vcstool colcon-common-extensions +echo ::endgroup:: + +# Install bazelisk +wget https://github.com/bazelbuild/bazelisk/releases/download/v1.16.0/bazelisk-linux-amd64 +mv ./bazelisk-linux-amd64 /usr/bin/bazel +chmod +x /usr/bin/bazel + +# Import repos +mkdir -p /gz +cd /gz +cp -R /github/workspace /gz/bazel +vcs import . 
< /github/workspace/example/bazel.repos + +echo ::group::Install dependencies from binaries +EXCLUDE_APT="libignition|libgz|libsdformat|libogre|dart" +UBUNTU_VERSION=`lsb_release -cs` +ALL_PACKAGES=$( \ + sort -u $(find . -iname 'packages-'$UBUNTU_VERSION'.apt' -o -iname 'packages.apt') | grep -Ev $EXCLUDE_APT | tr '\n' ' ') +apt-get install --no-install-recommends --quiet --yes $ALL_PACKAGES +echo ::endgroup:: + +ln -sf /gz/bazel/example/WORKSPACE.example /gz/WORKSPACE +ln -sf /gz/bazel/example/BUILD.bazel.example /gz/BUILD.bazel +ln -sf /gz/bazel/example/bazelrc.example /gz/.bazelrc +ln -sf /gz/bazel/example/bazelproject.example /gz/.bazelproject + +echo ::group::Bazel query +bazel query $BAZEL_ARGS +echo ::endgroup:: + +echo ::group::Bazel build +bazel build $BAZEL_ARGS +echo ::endgroup:: + +echo ::group::Bazel test +bazel test $BAZEL_ARGS +echo ::endgroup:: diff --git a/.github/ci/packages-jammy.apt b/.github/ci/packages-jammy.apt new file mode 100644 index 0000000..05e6572 --- /dev/null +++ b/.github/ci/packages-jammy.apt @@ -0,0 +1 @@ +libnlopt-cxx-dev diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..937ee33 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,19 @@ +name: Ubuntu CI + +on: [push, pull_request] + +jobs: + jammy-ci: + runs-on: ubuntu-latest + name: Ubuntu Jammy CI + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Login to GHCR + run: echo ${{ secrets.CR_PAT }} | docker login ghcr.io -u $GITHUB_ACTOR --password-stdin + + - name: Compile and test + uses: ./.github/actions/bazel-ci-jammy + with: + bazel-args: //... 
diff --git a/BUILD.bazel b/BUILD.bazel new file mode 100644 index 0000000..0139e6e --- /dev/null +++ b/BUILD.bazel @@ -0,0 +1,6 @@ +# -*- python -*- +load("@gz//bazel/lint:lint.bzl", "add_lint_tests") + +package(default_visibility = ["//visibility:public"]) + +add_lint_tests() diff --git a/bazel.rc b/bazel.rc new file mode 100644 index 0000000..75249e9 --- /dev/null +++ b/bazel.rc @@ -0,0 +1,24 @@ +import %workspace%/bazel/cc_toolchain/bazel.rc + +# Disable native Python rules in Bazel versions before 4.0. +build --incompatible_load_python_rules_from_bzl=yes + +# Default to an optimized build. +build -c opt + +# Default build options. +build --force_pic +build --strip=never +build --strict_system_includes +build --workspace_status_command=./bazel/workspace/workspace-status.bash +build --define enable_ifaddrs=true + +test --define enable_ifaddrs=true + +# -- Options for explicitly using GCC. +common:gcc --repo_env=CC=gcc +common:gcc --repo_env=CXX=g++ +build:gcc --action_env=CC=gcc +build:gcc --action_env=CXX=g++ +build:gcc --host_action_env=CC=gcc +build:gcc --host_action_env=CXX=g++ diff --git a/cc_toolchain/BUILD.bazel b/cc_toolchain/BUILD.bazel new file mode 100644 index 0000000..5f3fcf3 --- /dev/null +++ b/cc_toolchain/BUILD.bazel @@ -0,0 +1,21 @@ +# -*- python -*- + +package(default_visibility = ["//visibility:public"]) + +config_setting( + name = "debug", + values = {"compilation_mode": "dbg"}, +) + +config_setting( + name = "linux", + constraint_values = ["@bazel_tools//platforms:linux"], +) + +filegroup( + name = "toolchain_deps", + data = select({ + "//conditions:default": [], + }), + visibility = ["//common:__pkg__"], +) diff --git a/cc_toolchain/bazel.rc b/cc_toolchain/bazel.rc new file mode 100644 index 0000000..252711c --- /dev/null +++ b/cc_toolchain/bazel.rc @@ -0,0 +1,6 @@ +# Disable ccache due to incompatibility with Bazel. +build --action_env=CCACHE_DISABLE=1 + +# Add C++17 compiler flags. 
+build --cxxopt=-std=c++17 +build --host_cxxopt=-std=c++17 diff --git a/example/BUILD.bazel.example b/example/BUILD.bazel.example new file mode 100644 index 0000000..ae79369 --- /dev/null +++ b/example/BUILD.bazel.example @@ -0,0 +1,9 @@ +load("@gz//bazel/lint:lint.bzl", "add_lint_tests") + +package( + default_visibility = ["//visibility:public"], +) + +exports_files([ + ".bazelproject", +]) diff --git a/example/WORKSPACE.example b/example/WORKSPACE.example new file mode 100644 index 0000000..5733d18 --- /dev/null +++ b/example/WORKSPACE.example @@ -0,0 +1,5 @@ +workspace(name = "gz") + +load("@gz//bazel/workspace:default.bzl", "add_default_workspace") + +add_default_workspace() diff --git a/example/bazel.repos b/example/bazel.repos new file mode 100644 index 0000000..56f46b6 --- /dev/null +++ b/example/bazel.repos @@ -0,0 +1 @@ +repositories: diff --git a/example/bazelproject.example b/example/bazelproject.example new file mode 100644 index 0000000..af8405d --- /dev/null +++ b/example/bazelproject.example @@ -0,0 +1,20 @@ +# This is the default project view file for CLion. It describes which +# directories and targets CLion should traverse when ingesting the Bazel build. +# +# Developers who only wish to work on a subset of Drake may maintain custom +# project views locally. The more narrowly scoped the project view, the faster +# CLion indexing will be. Restricted project views of general interest may also +# be committed to this directory, with file names of the form foo.bazelproject. +# +# Detailed documentation for the file format is available at +# https://ij.bazel.build/docs/project-views.html + +directories: + . 
+ -build + +targets: + //...:all + +build_flags: + --experimental_google_legacy_api diff --git a/example/bazelrc.example b/example/bazelrc.example new file mode 100644 index 0000000..11b7574 --- /dev/null +++ b/example/bazelrc.example @@ -0,0 +1 @@ +import %workspace%/bazel/bazel.rc diff --git a/lint/BUILD.bazel b/lint/BUILD.bazel new file mode 100644 index 0000000..4f00122 --- /dev/null +++ b/lint/BUILD.bazel @@ -0,0 +1,45 @@ +package(default_visibility = ["//visibility:public"]) + +load("@gz//bazel/lint:lint.bzl", "add_lint_tests") +load( + "@gz//bazel/skylark:gz_py.bzl", + "gz_py_binary", + "gz_py_library", + "gz_py_unittest", +) + +gz_py_library( + name = "find_data", + srcs = ["find_data.py"], +) + +gz_py_library( + name = "util", + srcs = ["util.py"], + data = ["//:.bazelproject"], + visibility = ["//visibility:private"], +) + +gz_py_binary( + name = "buildifier", + srcs = ["buildifier.py"], + data = [ + ":buildifier-tables.json", + "@buildifier", + ], + deps = [ + ":find_data", + ":util", + ], +) + +gz_py_binary( + name = "bzlcodestyle", + srcs = ["bzlcodestyle.py"], + main = "bzlcodestyle.py", + deps = ["@pycodestyle"], +) + +# === test === + +add_lint_tests() diff --git a/lint/bazel_lint.bzl b/lint/bazel_lint.bzl new file mode 100644 index 0000000..49b0fd9 --- /dev/null +++ b/lint/bazel_lint.bzl @@ -0,0 +1,85 @@ +# -*- mode: python -*- +# vi: set ft=python : + +load("@gz//bazel/skylark:gz_py.bzl", "py_test_isolated") + +#------------------------------------------------------------------------------ +# Internal helper; set up test given name and list of files. Will do nothing +# if no files given. +def _bazel_lint(name, files, ignore): + if files: + ignores = ["E%s" % e for e in (ignore or [])] + + # W504 relates to linebreaks around binary operators; buildifier + # disagrees with what pycodestyle wants to do. 
+ ignores.append("W504") + + ignores_as_arg = ["--ignore=" + ",".join(ignores)] + locations = ["$(locations %s)" % f for f in files] + + py_test_isolated( + name = name + "_codestyle", + size = "small", + srcs = ["@gz//bazel/lint:bzlcodestyle"], + data = files, + args = ignores_as_arg + locations, + main = "@gz//bazel/lint:bzlcodestyle.py", + tags = ["bzlcodestyle", "lint"], + ) + + py_test_isolated( + name = name + "_buildifier", + size = "small", + srcs = ["@gz//bazel/lint:buildifier"], + data = files, + args = ["-mode=check"] + locations, + main = "@gz//bazel/lint:buildifier.py", + tags = ["buildifier", "lint"], + ) + +#------------------------------------------------------------------------------ +def bazel_lint( + name = "bazel", + ignore = None, + extra_srcs = None, + exclude = None): + """ + Runs the ``bzlcodestyle`` code style checker on all Bazel files in the + current directory. The tool is based on the ``pycodestyle`` :pep:`8` code + style checker, but always disables certain checks while adding others. + + Args: + name: Name prefix of the test (default = "bazel"). + ignore: List of errors (as integers, without the 'E') to ignore + (default = [265, 302, 305]). + extra_srcs: List of files to lint that would otherwise be missed by the + default glob pattern for Bazel source code. + exclude: List to be passed to the skylark glob function for files that + should not be linted (e.g., vendored files). 
+ + Example: + BUILD: + load("//bazel/lint:bazel_lint.bzl", "bazel_lint") + + bazel_lint() + """ + + if ignore == None: + ignore = [265, 302, 305] + if extra_srcs == None: + extra_srcs = [] + if exclude == None: + exclude = [] + + _bazel_lint( + name = name, + files = native.glob([ + "*.bzl", + "*.BUILD", + "*.BUILD.bazel", + "BUILD", + "BUILD.bazel", + "WORKSPACE", + ], exclude = exclude) + extra_srcs, + ignore = ignore, + ) diff --git a/lint/buildifier-tables.json b/lint/buildifier-tables.json new file mode 100644 index 0000000..53cd401 --- /dev/null +++ b/lint/buildifier-tables.json @@ -0,0 +1,97 @@ +{ + "DOCUMENTATION_FOR_DEVELOPERS_EDITING_THIS_FILE": { + "What": "This file explains to buildifier how to sort attribute names, and which lists of labels get internally sorted.", + "Why": "Drake's skylark macros have custom attributes, and we want them to sort in a reasonable way.", + "See": "https://github.com/bazelbuild/buildtools/blob/master/tables/tables.go" + }, + "IsLabelArg": { + }, + "LabelBlacklist": { + }, + "IsSortableListArg": { + "allowed_externals": true + }, + "SortableWhitelist": { + "install.data": true, + "install.docs": true, + "install.hdrs": true, + "install.targets": true, + "install_files.files": true + }, + "NamePriority": { + "drake_cc_library.interface_deps": 3, + + "drake_cc_optional_library.opt_in_condition": -96, + "drake_cc_optional_library.opt_out_condition": -96, + + "drake_cc_variant_library.opt_in_condition": -96, + "drake_cc_variant_library.opt_out_condition": -96, + + "drake_cc_variant_library.srcs_always": -92, + "drake_cc_variant_library.srcs_enabled": -91, + "drake_cc_variant_library.srcs_disabled": -90, + "drake_cc_variant_library.interface_deps": 3, + "drake_cc_variant_library.deps_always": 4, + "drake_cc_variant_library.deps_enabled": 4, + + "git_repository.remote": 300, + "git_repository.commit": 310, + + "pypi_archive.package": 300, + "pypi_archive.version": 310, + "pypi_archive.sha256": 340, + 
"pypi_archive.strip_prefix": 350, + "pypi_archive.build_file": 360, + + "new_local_repository.path": 300, + "new_local_repository.build_file_content": 360, + + "pkg_config_repository.licenses": 100, + "pkg_config_repository.atleast_version": 200, + "pkg_config_repository.static": 201, + "pkg_config_repository.pkg_config_paths": 202, + "pkg_config_repository.extra_srcs": 300, + "pkg_config_repository.extra_hdrs": 301, + "pkg_config_repository.extra_copts": 302, + "pkg_config_repository.extra_defines": 303, + "pkg_config_repository.extra_includes": 304, + "pkg_config_repository.extra_linkopts": 305, + "pkg_config_repository.extra_deps": 306, + + "install.targets": 400, + "install.archive_dest": 401, + "install.archive_strip_prefix": 401, + "install.library_dest": 401, + "install.library_strip_prefix": 401, + "install.runtime_dest": 401, + "install.runtime_strip_prefix": 401, + "install.java_dest": 402, + "install.java_strip_prefix": 402, + "install.py_dest": 402, + "install.py_strip_prefix": 402, + "install.hdrs": 410, + "install.hdr_dest": 411, + "install.hdr_strip_prefix": 411, + "install.guess_hdrs": 415, + "install.guess_hdrs_exclude": 416, + "install.data": 420, + "install.data_dest": 421, + "install.guess_data": 425, + "install.guess_data_exclude": 426, + "install.docs": 440, + "install.doc_dest": 441, + "install.doc_strip_prefix": 441, + "install.rename": 460, + + "install_files.files": 400, + "install_files.strip_prefix": 401, + "install_files.rename": 460, + + "install.allowed_externals": 465, + "install_files.allowed_externals": 465, + "install.visibility": 490, + "install_files.visibility": 490, + "install.deps": 499, + "install_files.deps": 499 + } +} diff --git a/lint/buildifier.py b/lint/buildifier.py new file mode 100644 index 0000000..bb8763d --- /dev/null +++ b/lint/buildifier.py @@ -0,0 +1,149 @@ +"""Run Google's buildifier tool to fix, check, or fix-diff BUILD files. + +All arguments except "--all" are passed through to Google's tool. 
As with the +Google tool, the default mode is "-mode=fix". In "-mode=check", we promote +lint errors to a non-zero exitcode. +""" + + +import os +import re +import subprocess +import sys +from subprocess import Popen, PIPE, STDOUT + +from gz.bazel.lint.find_data import find_data +from gz.bazel.lint.util import find_all_sources + +# These match data=[] in our BUILD.bazel file. +_BUILDIFIER = "external/buildifier/buildifier" +_TABLES = "bazel/lint/buildifier-tables.json" + + +def _make_buildifier_command(): + """Returns a list starting with the buildifier executable, followed by any + required default arguments.""" + return [ + find_data(_BUILDIFIER), + "-add_tables={}".format(find_data(_TABLES))] + + +def _help(command): + """Perform the --help operation (display output) and return an exitcode.""" + process = Popen(command, stdout=PIPE, stderr=STDOUT) + stdout, _ = process.communicate() + lines = stdout.splitlines() + # Edit the first line to allow "--all" as a disjunction from "files...", + # and make one or the other required. + head = re.sub(r'\[(files\.\.\.)\]', r'<\1 | --all>', lines.pop(0)) + for line in [head] + lines: + print(line) + print("") + print("=== Drake-specific additions ===") + print("") + print("If the --all flag is given, buildifier operates on every BUILD,") + print("*.BUILD, *.bazel, and *.bzl file in the tree except third_party.") + print("") + print("Without '--all', 'files...' 
are required; stdin cannot be used.") + return process.returncode + + +def _find_buildifier_sources(workspace_name): + """Return a list of all filenames to be covered by buildifier.""" + workspace, sources_relpath = find_all_sources(workspace_name) + exact_filenames = ["BUILD", "WORKSPACE"] + extensions = ["bazel", "bzl", "BUILD"] + return workspace, [ + os.path.join(workspace, relpath) + for relpath in sources_relpath + if os.path.splitext(relpath)[1][1:] in extensions + or os.path.basename(relpath) in exact_filenames + ] + + +def _passes_check_mode(args): + """The `args` list should be as per subprocess.check_call. Returns True + iff builfidier runs with exitcode 0 and no output, or else returns False + iff reformat is needed, or else raises an exception. + """ + try: + output = subprocess.check_output(args) + return (len(output) == 0) + except subprocess.CalledProcessError as e: + # https://github.com/bazelbuild/buildtools/blob/1a7c0ec10697afcb87af8a09f12c3f9b9ca56fb2/buildifier/buildifier.go#L227 + REFORMAT_IS_NEEDED = 4 + if e.returncode == REFORMAT_IS_NEEDED: + return False + raise e + + +def main(workspace_name="gz"): + # Slice out our overlay command-line argument "--all". + argv = sys.argv[1:] + find_all = False + if "--all" in argv: + find_all = True + argv.remove("--all") + + # Find the wrapped tool. + tool_cmds = _make_buildifier_command() + + # Process --help. + if "--help" in argv or "-help" in argv: + return _help(tool_cmds + argv) + + # Process --all. 
+ has_files = len([x for x in argv if not x.startswith("-")]) > 0 + if find_all and has_files: + print("ERROR: cannot combine single inputs with '--all'") + return 1 + if not find_all and not has_files: + print("ERROR: no input files; did you want '--all'?") + return 1 + if find_all: + workspace_dir, found = _find_buildifier_sources(workspace_name) + if len(found) == 0: + print("ERROR: '--all' could not find anything") + return 1 + print(f"This will reformat {len(found)} files " + f"within {workspace_dir}") + if input("Are you sure [y/N]? ") not in ["y", "Y"]: + print("... canceled") + sys.exit(1) + argv.extend(found) + + # Provide helpful diagnostics when in check mode. Buildifier's -mode=check + # uses exitcode 0 even when lint exists; we use whether or not its output + # was empty to tell whether there was lint. + if "-mode=check" in argv or "--mode=check" in argv: + if _passes_check_mode(tool_cmds + argv): + return 0 + switches = [x for x in argv if x.startswith("-")] + files = [x for x in argv if not x.startswith("-")] + print("ERROR: buildifier: the required formatting is incorrect") + for one_file in files: + if not _passes_check_mode(tool_cmds + switches + [one_file]): + print("ERROR: %s:1: error: %s" % ( + one_file, "the required formatting is incorrect")) + print("ERROR: %s:1: note: fix via %s %s" % ( + one_file, "bazel-bin/bazel/lint/buildifier", one_file)) + print(("ERROR: %s:1: note: if that program does not exist, " + "you might need to compile it first: " + "bazel build //bazel/lint/...") % + one_file) + print("NOTE: see https://drake.mit.edu/bazel.html#buildifier") + return 1 + + # In fix mode, disallow running from within the Bazel sandbox. + if "-mode=diff" not in argv and "--mode=diff" not in argv: + if os.getcwd().endswith(".runfiles/gz"): + print("ERROR: do not use 'bazel run' for buildifier in fix mode") + print("ERROR: use bazel-bin/bazel/lint/buildifier instead") + return 1 + + # In fix or diff mode, just let buildifier do its thing. 
+ return subprocess.call(tool_cmds + argv) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/lint/bzlcodestyle.py b/lint/bzlcodestyle.py new file mode 100644 index 0000000..d4a1f23 --- /dev/null +++ b/lint/bzlcodestyle.py @@ -0,0 +1,101 @@ +# bzlcodestyle.py - A Skylark code style checker. +# +# Copyright 2017 Robot Locomotion Group @ CSAIL +# +# Portions of this code are based on pycodestyle.py: +# +# Copyright 2006-2009 Johann C. Rocholl +# Copyright 2009-2014 Florent Xicluna +# Copyright 2014-2016 Ian Lee +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation files +# (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, +# publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +import pycodestyle +import sys + + +def whitespace_around_named_parameter_equals_positive(logical_line, tokens): + """Require spaces around the '=' sign in function arguments.""" + # N.B. Nominal `pycodestyle` includes a method named + # `whitespace_around_named_parameter_equals`, which is the negative case. 
+ # Python3 struggles with a same-named method because it does not want to + # order functions (the sorting tuple is `(name, function, args)`). + # We must avoid colliding with this name, and ensure this takes precedence. + parens = 0 + no_space = False + prev_end = None + annotated_func_arg = False + in_def = logical_line.startswith('def') + message = "B251 expected spaces around keyword / parameter equals" + for token_type, text, start, end, line in tokens: + if token_type == pycodestyle.tokenize.NL: + continue + if no_space: + no_space = False + if start == prev_end: + yield (prev_end, message) + if token_type == pycodestyle.tokenize.OP: + if text in '([': + parens += 1 + elif text in ')]': + parens -= 1 + elif in_def and text == ':' and parens == 1: + annotated_func_arg = True + elif parens and text == ',' and parens == 1: + annotated_func_arg = False + elif parens and text == '=' and not annotated_func_arg: + no_space = True + if start == prev_end: + yield (prev_end, message) + if not parens: + annotated_func_arg = False + + prev_end = end + + +def _main(): + """Parse options and run checks on Skylark source.""" + + pycodestyle.register_check( + whitespace_around_named_parameter_equals_positive) + + style_guide = pycodestyle.StyleGuide(parse_argv=True) + options = style_guide.options + + ignore = set(options.ignore) + ignore.add('E251') # Skylark wants spaces around named parameters + ignore.add('E711') # Skylark has no `is` + ignore.add('E721') # Skylark has no `isinstance` + options.ignore = tuple(ignore) + + report = style_guide.check_files() + + if options.statistics: + report.print_statistics() + + if report.total_errors: + if options.count: + sys.stderr.write(str(report.total_errors) + '\n') + sys.exit(1) + + +if __name__ == '__main__': + _main() diff --git a/lint/find_data.py b/lint/find_data.py new file mode 100644 index 0000000..488c1c9 --- /dev/null +++ b/lint/find_data.py @@ -0,0 +1,24 @@ +"""Helper for locating data resources when running as a 
Bazel-compiled python +program. In particular, when running a program created by a `py_binary()` rule +within a `BUILD.bazel` file, this module provides access to files that were +declared as resources via the `data = [...]` attribute on the `py_binary` rule. +""" + +import errno +import os +import sys + + +def find_data(relpath): + """Given a relpath like drake/pkg/res.txt or external/repo/pkg/res.txt, + find the data file and return its path""" + # Because we are in a py_binary, Bazel's wrapper script sets up our + # $PYTHONPATH to have our resources somewhere on a sys.path entry. + for one_path in sys.path: + possible = os.path.join(one_path, relpath) + if os.path.exists(possible): + return possible + raise IOError( + errno.ENOENT, + "Could not find data {}".format(relpath), + relpath) diff --git a/lint/lint.bzl b/lint/lint.bzl new file mode 100644 index 0000000..cf94189 --- /dev/null +++ b/lint/lint.bzl @@ -0,0 +1,12 @@ +load("@gz//bazel/lint:bazel_lint.bzl", "bazel_lint") + +def add_lint_tests( + bazel_lint_ignore = None, + bazel_lint_extra_srcs = None, + bazel_lint_exclude = None): + existing_rules = native.existing_rules().values() + bazel_lint( + ignore = bazel_lint_ignore, + extra_srcs = bazel_lint_extra_srcs, + exclude = bazel_lint_exclude, + ) diff --git a/lint/util.py b/lint/util.py new file mode 100644 index 0000000..c9529bc --- /dev/null +++ b/lint/util.py @@ -0,0 +1,75 @@ +"""Common helpers for source-tree linter utilities. +""" + +import os +import sys + + +def find_all_sources(workspace_name): + """Return [workspace, paths] list, where `workspace` is a path to the root + of the given `workspace_name`, and `paths` are relative paths under it that + are all of `workspace_name`'s source files, excluding third_party files. + Because this abuses (escapes from) the Bazel sandbox, this function should + *only* be used by linter tools and their unit tests. It is thus given + private visibility in our BUILD.bazel file. 
+ """ + # Our outermost `myprogram.runfiles` directory will contain a file named + # MANIFEST. Because this py_library declares a `data=[]` dependency on + # the top-level .bazelproject file, the manifest will cite the original + # location of that file, which we can abuse to find the absolute path to + # the root of the source tree. (For workspace_name values other than + # "drake", callers should declare a data dependency on their workspace's + # top-level .bazelproject file.) + workspace_root = None + for entry in sys.path: + if workspace_root is not None: + break + if not entry.endswith(".runfiles"): + continue + manifest = os.path.join(entry, "MANIFEST") + if not os.path.exists(manifest): + continue + with open(manifest, "r") as infile: + lines = infile.readlines() + for one_line in lines: + if not one_line.startswith(workspace_name + "/.bazelproject"): + continue + _, source_sentinel = one_line.split(" ") + workspace_root = os.path.dirname(os.path.realpath(source_sentinel)) + assert workspace_root.startswith("/"), workspace_root + assert os.path.isdir(workspace_root), workspace_root + break + if not workspace_root: + raise RuntimeError("Cannot find .bazelproject in MANIFEST") + # Make sure we found the right place. + workspace_file = os.path.join(workspace_root, "WORKSPACE") + if not os.path.exists(workspace_file): + raise RuntimeError(f"Cannot find WORKSPACE at {workspace_root}") + required_line = f'workspace(name = "{workspace_name}")' + with open(workspace_file, "r") as f: + if (required_line + "\n") not in f.readlines(): + raise RuntimeError( + f"Cannot find {required_line} in {workspace_file}") + # Walk the tree (ignoring symlinks), and collect a list of all workspace- + # relative filenames, but excluding a few specific items. 
+ relpaths = [] + for abs_dirpath, dirs, files in os.walk(workspace_root): + assert abs_dirpath.startswith(workspace_root) + rel_dirpath = abs_dirpath[len(workspace_root) + 1:] + # Take all files within the currently-walked directory. + for one_filename in files: + if one_filename == ".DS_Store": + continue + relpaths.append(os.path.join(rel_dirpath, one_filename)) + # Don't recurse into children of "third_party". + if abs_dirpath.endswith("/third_party"): + dirs[:] = () + continue + # Don't recurse into dotfile directories (such as ".git"), nor into + # build directories. + for i, one_dir in reversed(list(enumerate(list(dirs)))): + if one_dir.startswith("."): + dirs.pop(i) + elif rel_dirpath == "" and one_dir.startswith("bazel-"): + dirs.pop(i) + return workspace_root, sorted(relpaths) diff --git a/py_toolchain/BUILD.bazel b/py_toolchain/BUILD.bazel new file mode 100644 index 0000000..db57a3c --- /dev/null +++ b/py_toolchain/BUILD.bazel @@ -0,0 +1,94 @@ +# -*- python -*- + +load("@rules_python//python:defs.bzl", "py_runtime", "py_runtime_pair") +load( + ":interpreter_paths.bzl", + "LINUX_DBG_INTERPRETER_PATH", + "LINUX_INTERPRETER_PATH", + "MACOS_ARM64_INTERPRETER_PATH", + "MACOS_I386_INTERPRETER_PATH", +) + +# The value of interpreter_path must match the value of linux_interpreter_path +# used when declaring the @python repository. 
# Python 3 runtime for ordinary (release) Linux builds.  The interpreter_path
# here must match the linux_interpreter_path used when declaring the @python
# repository (see interpreter_paths.bzl).
py_runtime(
    name = "linux_py3_runtime",
    interpreter_path = LINUX_INTERPRETER_PATH,
    python_version = "PY3",
)

# Pairs the (absent) PY2 runtime with the PY3 runtime above, as required by
# the rules_python toolchain API.
py_runtime_pair(
    name = "linux_py_runtime_pair",
    py3_runtime = ":linux_py3_runtime",
)

# Default Python toolchain for Linux targets.
toolchain(
    name = "linux_toolchain",
    target_compatible_with = ["@platforms//os:linux"],
    toolchain = ":linux_py_runtime_pair",
    toolchain_type = "@rules_python//python:toolchain_type",
)

# Debug-build Python runtime (python3-dbg) for Linux.
py_runtime(
    name = "linux_dbg_py3_runtime",
    interpreter_path = LINUX_DBG_INTERPRETER_PATH,
    python_version = "PY3",
)

py_runtime_pair(
    name = "linux_dbg_py_runtime_pair",
    py3_runtime = ":linux_dbg_py3_runtime",
)

# Typically this toolchain would be loaded manually using the
# --extra_toolchains command line option.
toolchain(
    name = "linux_dbg_toolchain",
    target_compatible_with = ["@platforms//os:linux"],
    toolchain = ":linux_dbg_py_runtime_pair",
    toolchain_type = "@rules_python//python:toolchain_type",
)

# The value of interpreter_path must match the value of macos_interpreter_path
# used when declaring the @python repository.
+py_runtime( + name = "macos_i386_py3_runtime", + interpreter_path = MACOS_I386_INTERPRETER_PATH, + python_version = "PY3", +) + +py_runtime( + name = "macos_arm64_py3_runtime", + interpreter_path = MACOS_ARM64_INTERPRETER_PATH, + python_version = "PY3", +) + +py_runtime_pair( + name = "macos_i386_py_runtime_pair", + py3_runtime = ":macos_i386_py3_runtime", +) + +py_runtime_pair( + name = "macos_arm64_py_runtime_pair", + py3_runtime = ":macos_arm64_py3_runtime", +) + +toolchain( + name = "macos_i386_toolchain", + target_compatible_with = [ + "@platforms//cpu:x86_64", + "@platforms//os:osx", + ], + toolchain = ":macos_i386_py_runtime_pair", + toolchain_type = "@rules_python//python:toolchain_type", +) + +toolchain( + name = "macos_arm64_toolchain", + target_compatible_with = [ + "@platforms//cpu:arm64", + "@platforms//os:osx", + ], + toolchain = ":macos_arm64_py_runtime_pair", + toolchain_type = "@rules_python//python:toolchain_type", +) diff --git a/py_toolchain/interpreter_paths.bzl b/py_toolchain/interpreter_paths.bzl new file mode 100644 index 0000000..61e74c6 --- /dev/null +++ b/py_toolchain/interpreter_paths.bzl @@ -0,0 +1,21 @@ +# -*- python -*- + +# Default value of interpreter_path used by the py_runtime in the default +# Python toolchain registered on the @platforms//os:linux platform. +LINUX_INTERPRETER_PATH = "/usr/bin/python3" + +# Default value of interpreter_path used by the py_runtime in the Python debug +# toolchain registered on the @platforms//os:linux platform when the +# --extra_toolchains=//tools/py_toolchain:linux_dbg_toolchain command line +# option is given. +LINUX_DBG_INTERPRETER_PATH = "/usr/bin/python3-dbg" + +# Default value of interpreter_path used by the py_runtime in the default +# Python toolchain registered on the @platforms//os:osx platform when +# running i386 (i.e., x86_64) builds. 
+MACOS_I386_INTERPRETER_PATH = "/usr/local/bin/python3.9" + +# Default value of interpreter_path used by the py_runtime in the default +# Python toolchain registered on the @platforms//os:osx platform when +# running arm64 builds. +MACOS_ARM64_INTERPRETER_PATH = "/opt/homebrew/bin/python3.9" diff --git a/skylark/BUILD.bazel b/skylark/BUILD.bazel new file mode 100644 index 0000000..ae7767b --- /dev/null +++ b/skylark/BUILD.bazel @@ -0,0 +1,21 @@ +# -*- python -*- + +load("@gz//bazel/skylark:gz_py.bzl", "gz_py_binary") + +# Used by :python_env.bzl. +config_setting( + name = "linux", + values = {"cpu": "k8"}, +) + +gz_py_binary( + name = "gz_configure_file", + srcs = ["gz_configure_file.py"], + visibility = ["//visibility:public"], +) + +gz_py_binary( + name = "cmake_configure_file", + srcs = ["cmake_configure_file.py"], + visibility = ["//visibility:public"], +) diff --git a/skylark/build_defs.bzl b/skylark/build_defs.bzl new file mode 100644 index 0000000..0516847 --- /dev/null +++ b/skylark/build_defs.bzl @@ -0,0 +1,34 @@ +load( + ":cmake_configure_file.bzl", + _cmake_configure_file = "cmake_configure_file", +) +load( + ":gz_configure_file.bzl", + _gz_configure_header = "gz_configure_file", +) +load( + ":gz_export_header.bzl", + _gz_export_header = "gz_export_header", +) +load( + ":gz_include_header.bzl", + _gz_include_header = "gz_include_header", +) + +GZ_ROOT = "@gz//" + +GZ_VISIBILITY = [ + "//:__subpackages__", + "//experimental:__subpackages__", +] + +GZ_FEATURES = [ + "-parse_headers", + "-use_header_modules", + "-layering_check", +] + +cmake_configure_file = _cmake_configure_file +gz_configure_header = _gz_configure_header +gz_export_header = _gz_export_header +gz_include_header = _gz_include_header diff --git a/skylark/cmake_configure_file.bzl b/skylark/cmake_configure_file.bzl new file mode 100644 index 0000000..9041275 --- /dev/null +++ b/skylark/cmake_configure_file.bzl @@ -0,0 +1,86 @@ +# -*- python -*- + +# Copied from the Drake project: +# 
https://github.com/RobotLocomotion/drake/blob/17423f8fb6f292b4af0b4cf3c6c0f157273af501/tools/workspace/cmake_configure_file.bzl + +# Defines the implementation actions to cmake_configure_file. +def _cmake_configure_file_impl(ctx): + arguments = [ + "--input", + ctx.file.src.path, + "--output", + ctx.outputs.out.path, + ] + for item in ctx.attr.defines: + arguments += ["-D" + item] + for item in ctx.attr.undefines: + arguments += ["-U" + item] + for item in ctx.files.cmakelists: + arguments += ["--cmakelists", item.path] + ctx.actions.run( + inputs = [ctx.file.src] + ctx.files.cmakelists, + outputs = [ctx.outputs.out], + arguments = arguments, + env = ctx.attr.env, + executable = ctx.executable.cmake_configure_file_py, + ) + return [] + +# Defines the rule to cmake_configure_file. +_cmake_configure_file_gen = rule( + attrs = { + "src": attr.label( + allow_single_file = True, + mandatory = True, + ), + "out": attr.output(mandatory = True), + "defines": attr.string_list(), + "undefines": attr.string_list(), + "cmakelists": attr.label_list(allow_files = True), + "cmake_configure_file_py": attr.label( + cfg = "host", + executable = True, + default = Label("@gz//bazel/skylark:cmake_configure_file"), + ), + "env": attr.string_dict( + mandatory = True, + allow_empty = True, + ), + }, + output_to_genfiles = True, + implementation = _cmake_configure_file_impl, +) + +def cmake_configure_file( + name, + src = None, + out = None, + defines = None, + undefines = None, + cmakelists = None, + **kwargs): + """Creates a rule to generate an out= file from a src= file, using CMake's + configure_file substitution semantics. This implementation is incomplete, + and may not produce the same result as CMake in all cases. + Definitions optionally can be passed in directly as defines= strings (with + the usual defines= convention of either a name-only "HAVE_FOO", or a + key-value "MYSCALAR=DOUBLE"). 
+ Definitions optionally can be read from simple CMakeLists files that + contain statements of the form "set(FOO_MAJOR_VERSION 1)" and similar. + Variables that are known substitutions but which should be undefined can be + passed as undefines= strings. + See cmake_configure_file.py for our implementation of the configure_file + substitution rules. + The CMake documentation of the configure_file macro is: + https://cmake.org/cmake/help/latest/command/configure_file.html + """ + _cmake_configure_file_gen( + name = name, + src = src, + out = out, + defines = defines, + undefines = undefines, + cmakelists = cmakelists, + env = {}, + **kwargs + ) diff --git a/skylark/cmake_configure_file.py b/skylark/cmake_configure_file.py new file mode 100644 index 0000000..7a1d188 --- /dev/null +++ b/skylark/cmake_configure_file.py @@ -0,0 +1,155 @@ +"""A re-implementation of CMake's configure_file substitution semantics. This +implementation is incomplete, and may not produce the same result as CMake in +all (or even many) cases. +The CMake documentation of the configure_file macro is: +https://cmake.org/cmake/help/latest/command/configure_file.html + +Copied from the Drake project: +https://github.com/RobotLocomotion/drake/blob/17423f8fb6f292b4af0b4cf3c6c0f157273af501/tools/workspace/cmake_configure_file.py +""" + +import argparse +import os +import re +import sys + +from collections import OrderedDict + +# Looks like "#cmakedefine VAR ..." or "#cmakedefine01 VAR". +_cmakedefine = re.compile(r'^(\s*)#cmakedefine(01)? ([^ \r\n]+)(.*?)([\r\n]+)') + +# Looks like "@VAR@" or "${VAR}". +_varsubst = re.compile(r'^(.*?)(@[^ ]+?@|\$\{[^ ]+?\})(.*)([\r\n]*)') + + +# Transform a source code line per CMake's configure_file semantics. +# +# The 'definitions' provides values for CMake variables. The dict's keys are +# the variable names to substitute, and the dict's values are the values to +# substitute. (The values can be None, for known-but-undefined variable keys.) 
#
# The configuration semantics are as follows:
#
# - An input line 'cmakedefine VAR' turns into '#define VAR VALUE' if and only
#   if the 'definitions' dict has a non-None value VALUE for VAR, otherwise it
#   turns into '/* #undef VAR */'.
#
# - An input line 'cmakedefine01 VAR' turns into '#define VAR 1' if and only if
#   the 'definitions' dict has a non-None value for VAR, otherwise it turns
#   into '#define VAR 0'.
#
# - An input line with a substitution '@VAR@' or '${VAR}' replaces the
#   substitution token with the value in 'definitions' dict for that VAR, or
#   else the empty string if the value is None.  It is an error if there is no
#   such key in the dict.
def _transform(line, definitions):
    """Return `line` rewritten per CMake configure_file semantics (see the
    comment block above).  `definitions` maps variable names to values; a
    value of None means known-but-undefined.  Raises KeyError for a
    substitution token whose variable is not in `definitions` at all.
    """
    # Replace define statements.
    match = _cmakedefine.match(line)
    if match:
        blank, maybe01, var, rest, newline = match.groups()
        defined = definitions.get(var) is not None
        if maybe01:
            # Index the two-element list by the bool: False -> ' 0', True -> ' 1'.
            return blank + '#define ' + var + [' 0', ' 1'][defined] + newline
        elif defined:
            # Deliberately do NOT return here: fall through so that any
            # @VAR@ / ${VAR} tokens inside `rest` get substituted below.
            line = blank + '#define ' + var + rest + newline
        else:
            return blank + '/* #undef ' + var + ' */' + newline

    # Replace variable substitutions.  _varsubst captures only the first
    # token per match, so loop until no more tokens remain.
    while True:
        match = _varsubst.match(line)
        if not match:
            break
        before, xvarx, after, newline = match.groups()
        # The _varsubst regex guarantees xvarx starts with '@' or '${'.
        if xvarx[0] == '$':
            assert len(xvarx) >= 4
            assert xvarx[1] == '{'
            assert xvarx[-1] == '}'
            var = xvarx[2:-1]
        elif xvarx[0] == '@':
            assert len(xvarx) >= 3
            assert xvarx[-1] == '@'
            var = xvarx[1:-1]
        assert len(var) > 0

        if var not in definitions:
            raise KeyError('Missing definition for ' + var)
        value = definitions.get(var)
        if value is None:
            # Known-but-undefined variables expand to the empty string.
            value = ''
        line = before + value + after + newline

    return line


# Looks like "set(VAR value)".
_set_var = re.compile(r'^\s*set\s*\(\s*(.+)\s+(.+)\s*\)\s*$')


# From a line of CMakeLists.txt, return a set(...) key-value pair, if found.
+def _extract_definition(line, prior_definitions): + match = _set_var.match(line) + if not match: + return dict() + var, value = match.groups() + try: + value = _transform(value, prior_definitions) + except KeyError: + return dict() + if value.startswith('"'): + assert value.endswith('"') + value = value[1:-1] + return {var: value} + + +# Load our definitions dict, given the command-line args: +# - A command-line '-Dfoo' will add ('foo', 1) to the result. +# - A command-line '-Dfoo=bar' will add ('foo', 'bar') to the result. +# - A command-line '-Ufoo' will add ('foo', None) to the result. +def _setup_definitions(args): + result = OrderedDict() + for item in args.defines: + if '=' in item: + key, value = item.split('=', 1) + result[key] = value + else: + result[item] = 1 + + for item in args.undefines: + result[item] = None + + for filename in args.cmakelists: + with open(filename, 'r') as cmakelist: + for line in cmakelist.readlines(): + definition = _extract_definition(line, result) + result.update(definition) + + return result + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--input', metavar='FILE') + parser.add_argument('--output', metavar='FILE') + parser.add_argument( + '-D', metavar='NAME', dest='defines', action='append', default=[]) + parser.add_argument( + '-U', metavar='NAME', dest='undefines', action='append', default=[]) + parser.add_argument( + '--cmakelists', action='append', default=[]) + args = parser.parse_args() + if args.input is None or args.output is None: + parser.print_usage() + sys.exit(1) + definitions = _setup_definitions(args) + + with open(args.input, 'r') as input_file: + with open(args.output + '.tmp', 'w') as output_file: + for input_line in input_file.readlines(): + output_line = _transform(input_line, definitions) + output_file.write(output_line) + os.rename(args.output + '.tmp', args.output) + + +if __name__ == '__main__': + main() diff --git a/skylark/gz_configure_file.bzl 
def _gz_configure_file_impl(ctx):
    """Invokes the gz_configure_file.py tool to render ctx.file.src into
    ctx.outputs.out, passing along defines/undefines, any CMakeLists files
    to scrape, and the stable workspace-status file for version variables."""

    # The list of arguments we pass to the script.
    #   volatile status file: ctx.version_file
    #   stable status file: ctx.info_file
    arguments = [
        "--input",
        ctx.file.src.path,
        "--output",
        ctx.outputs.out.path,
        "--package",
        ctx.attr.package,
        "--stable",
        ctx.info_file.path,
    ]

    for item in ctx.attr.defines:
        arguments += ["-D" + item]
    for item in ctx.attr.undefines:
        arguments += ["-U" + item]
    for item in ctx.files.cmakelists:
        arguments += ["--cmakelists", item.path]

    # Action to call the script.  ctx.info_file is listed as an input so the
    # action reruns when the stable workspace status changes.
    ctx.actions.run(
        inputs = [ctx.file.src, ctx.info_file] + ctx.files.cmakelists,
        outputs = [ctx.outputs.out],
        arguments = arguments,
        env = ctx.attr.env,
        executable = ctx.executable.gz_configure_file_py,
    )
    return []

_gz_configure_file_gen = rule(
    attrs = {
        "src": attr.label(
            allow_single_file = True,
            mandatory = True,
        ),
        "out": attr.output(mandatory = True),
        "package": attr.string(mandatory = True),
        "defines": attr.string_list(),
        "undefines": attr.string_list(),
        "cmakelists": attr.label_list(allow_files = True),
        "gz_configure_file_py": attr.label(
            cfg = "host",
            executable = True,
            default = Label("@gz//bazel/skylark:gz_configure_file"),
        ),
        "env": attr.string_dict(
            mandatory = True,
            allow_empty = True,
        ),
    },
    output_to_genfiles = True,
    implementation = _gz_configure_file_impl,
)

def gz_configure_file(
        name,
        package = None,
        src = None,
        out = None,
        defines = None,
        undefines = None,
        cmakelists = None,
        **kwargs):
    """Generates `out` from the template `src` using CMake configure_file
    semantics plus gz project/version variables taken from the stable
    workspace status (see gz_configure_file.py).  When `out` is omitted it
    defaults to `src` with a trailing ".in" suffix removed.
    """
    if not out:
        out = src

    # Strip a trailing ".in" template suffix (e.g. "config.hh.in" ->
    # "config.hh").  Checking endswith() rather than find() avoids truncating
    # names that merely contain ".in" in the middle (find() would turn
    # "config.index.h.in" into "config" instead of "config.index.h").
    if out.endswith(".in") and out != ".in":
        out = out[:-len(".in")]

    _gz_configure_file_gen(
        name = name,
        package = package,
        src = src,
        out = out,
        defines = defines,
        undefines = undefines,
        cmakelists = cmakelists,
        env = {},
        **kwargs
    )
a/skylark/gz_configure_file.py b/skylark/gz_configure_file.py new file mode 100644 index 0000000..9c9844e --- /dev/null +++ b/skylark/gz_configure_file.py @@ -0,0 +1,192 @@ +import argparse +import os +import re +import sys + +from collections import OrderedDict + +# Looks like "#cmakedefine VAR ..." or "#cmakedefine01 VAR". +_cmakedefine = re.compile(r'^(\s*)#cmakedefine(01)? ([^ \r\n]+)(.*?)([\r\n]+)') + +# Looks like "@VAR@" or "${VAR}". +_varsubst = re.compile(r'^(.*?)(@[^ ]+?@|\$\{[^ ]+?\})(.*)([\r\n]*)') + + +# Transform a source code line per CMake's configure_file semantics. +# +# The 'definitions' provides values for CMake variables. The dict's keys are +# the variable names to substitute, and the dict's values are the values to +# substitute. (The values can be None, for known-but-undefined variable keys.) +# +# The configuration semantics are as follows: +# +# - An input line 'cmakedefine VAR' turns into '#define VAR VALUE' if and only +# if the 'definitions' dict has a non-None value VALUE for VAR, otherwise it +# turns into '/* #undef VAR */'. +# +# - An input line 'cmakedefine01 VAR' turns into '#define VAR 1' if and only if +# the 'definitions' dict has a non-None value for VAR, otherwise it turns +# into '#define VAR 0'. +# +# - An input line with a substitution '@VAR@' or '${VAR}' replaces the +# substitution token with the value in 'definitions' dict for that VAR, or +# else the empty string if the value is None. It is an error if there is no +# such key in the dict. +def _transform(line, definitions): + # Replace define statements. + match = _cmakedefine.match(line) + if match: + blank, maybe01, var, rest, newline = match.groups() + defined = definitions.get(var) is not None + if maybe01: + return blank + '#define ' + var + [' 0', ' 1'][defined] + newline + elif defined: + line = blank + '#define ' + var + rest + newline + else: + return blank + '/* #undef ' + var + ' */' + newline + + # Replace variable substitutions. 
+ while True: + match = _varsubst.match(line) + if not match: + break + before, xvarx, after, newline = match.groups() + if xvarx[0] == '$': + assert len(xvarx) >= 4 + assert xvarx[1] == '{' + assert xvarx[-1] == '}' + var = xvarx[2:-1] + elif xvarx[0] == '@': + assert len(xvarx) >= 3 + assert xvarx[-1] == '@' + var = xvarx[1:-1] + assert len(var) > 0 + + if var not in definitions: + raise KeyError('Missing definition for ' + var) + value = definitions.get(var) + if value is None: + value = '' + line = before + value + after + newline + + return line + + +# Looks like "set(VAR value)". +_set_var = re.compile(r'^\s*set\s*\(\s*(.+)\s+(.+)\s*\)\s*$') + + +# From a line of CMakeLists.txt, return a set(...) key-value pair, if found. +def _extract_definition(line, prior_definitions): + match = _set_var.match(line) + if not match: + return dict() + var, value = match.groups() + try: + value = _transform(value, prior_definitions) + except KeyError: + return dict() + if value.startswith('"'): + assert value.endswith('"') + value = value[1:-1] + return {var: value} + + +# Load our definitions dict, given the command-line args: +# - A command-line '-Dfoo' will add ('foo', 1) to the result. +# - A command-line '-Dfoo=bar' will add ('foo', 'bar') to the result. +# - A command-line '-Ufoo' will add ('foo', None) to the result. 
+def _setup_definitions(args): + result = OrderedDict() + for item in args.defines: + if '=' in item: + key, value = item.split('=', 1) + result[key] = value + else: + result[item] = 1 + + for item in args.undefines: + result[item] = None + + for filename in args.cmakelists: + with open(filename, 'r') as cmakelist: + for line in cmakelist.readlines(): + definition = _extract_definition(line, result) + result.update(definition) + + return result + +def setup_workspace_definitions(args): + stable_vars = dict() + with open(args.stable, "r") as f: + for entry in f.read().split("\n"): + if entry: + key_value = entry.split(' ', 1) + key = key_value[0].strip() + + if key.find("STABLE_") == 0: + key = key[7:] + stable_vars[key] = key_value[1].strip() + + if args.package.find('_') > 0: + designation = args.package.split('_')[1] + else: + designation = args.package + + project_version_var = f'{args.package.upper()}_VER' + variables = dict() + + if project_version_var in stable_vars: + ver = stable_vars[project_version_var] + major, minor, patch = ver.split('.') + variables["IGN_DESIGNATION"] = designation + variables["IGN_DESIGNATION_UPPER"] = designation.upper() + variables["IGN_DESIGNATION_LOWER"] = designation.lower() + variables["PROJECT_NAME"] = args.package.upper().replace('_', '-') + variables["PROJECT_NAME_LOWER"] = args.package.lower().replace('_', '-') + variables["PROJECT_NAME_NO_VERSION"] = args.package.upper().replace('_', '-') + variables["PROJECT_VERSION"] = f'{major}.{minor}' + variables["PROJECT_VERSION_FULL"] = f'{major}.{minor}.{patch}' + variables["PROJECT_VERSION_MAJOR"] = major + variables["PROJECT_VERSION_MINOR"] = minor + variables["PROJECT_VERSION_PATCH"] = patch + return variables + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--package') + parser.add_argument('--input', metavar='FILE') + parser.add_argument('--output', metavar='FILE') + parser.add_argument('--stable', metavar='FILE') + parser.add_argument( + '-D', 
metavar='NAME', dest='defines', action='append', default=[]) + parser.add_argument( + '-U', metavar='NAME', dest='undefines', action='append', default=[]) + parser.add_argument( + '--cmakelists', action='append', default=[]) + + args = parser.parse_args() + if args.input is None or args.output is None: + parser.print_usage() + sys.exit(1) + + arg_definitions = _setup_definitions(args) + ws_definitions = setup_workspace_definitions(args) + + definitions = { **arg_definitions, **ws_definitions } + + with open(args.input, 'r') as input_file: + with open (args.output + '.tmp', 'w') as output_file: + for ii, input_line in enumerate(input_file.readlines()): + try: + output_line = _transform(input_line, definitions) + output_file.write(output_line) + except Exception as ex: + print(f'Line {ii}: {ex}') + + os.rename(args.output + '.tmp', args.output) + +if __name__ == "__main__": + main() + diff --git a/skylark/gz_export_header.bzl b/skylark/gz_export_header.bzl new file mode 100644 index 0000000..26cf210 --- /dev/null +++ b/skylark/gz_export_header.bzl @@ -0,0 +1,68 @@ +load( + "@gz//bazel/workspace:generate_file.bzl", + "generate_file", +) + +def gz_export_header(name, lib_name, export_base, visibility): + generate_file( + name = name, + visibility = visibility, + content = """ +/* + * Copyright (C) 2017 Open Source Robotics Foundation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * +*/ + +/* + * ========================================================================== + * This file was automatically generated by Bazel; do not modify it directly. + * To modify this file, make changes to ign-bazel/ign_export_header.bzl + * ========================================================================== +*/ + +#ifndef {export_base}_EXPORT_HH_ +#define {export_base}_EXPORT_HH_ + +#ifndef {export_base}_VISIBLE +/// For {lib_name} developers: Apply this macro to {lib_name} +/// functions and classes which consumers of this library will need to be able +/// to call from their own programs or libraries. +#define {export_base}_VISIBLE \ + __attribute__ ((visibility("default"))) +#endif + + +#ifndef {export_base}_HIDDEN +/// For {lib_name} developers: Apply this macro to {lib_name} +/// functions and classes which must not be used by consumers of this library. +/// By default, this property is applied to all classes and functions which are +/// not tagged with {export_base}_VISIBLE, so this does not +/// generally need to be used. +#define {export_base}_HIDDEN \ + __attribute__ ((visibility("hidden"))) +#endif + + +#ifndef GZ_DEPRECATED +/// For {lib_name} developers: Use this macro to indicate that a +/// function or class has been deprecated and should no longer be used. A +/// version should be specified to provide context to the user about when the +/// function became deprecated. 
+#define GZ_DEPRECATED(version) __attribute__ ((__deprecated__)) +#endif + +#endif +""".format(lib_name = lib_name, export_base = export_base), + ) diff --git a/skylark/gz_include_header.bzl b/skylark/gz_include_header.bzl new file mode 100644 index 0000000..3075dd2 --- /dev/null +++ b/skylark/gz_include_header.bzl @@ -0,0 +1,38 @@ +# -*- python -*- + +load(":pathutils.bzl", "output_path") + +# Generate a header that includes a set of other headers +def _generate_include_header_impl(ctx): + # Collect list of headers + hdrs = [] + for h in ctx.attr.hdrs: + for f in h.files.to_list(): + hdrs.append(output_path(ctx, f, ctx.attr.strip_prefix)) + + # Generate include header + content = "#pragma once\n" + content = content + "\n".join(["#include <%s>" % h for h in hdrs]) + ctx.actions.write(output = ctx.outputs.out, content = content) + +gz_include_header = rule( + attrs = { + "hdrs": attr.label_list(allow_files = True), + "strip_prefix": attr.string_list(default = ["**/include/"]), + "out": attr.output(mandatory = True), + }, + output_to_genfiles = True, + implementation = _generate_include_header_impl, +) + +"""Generate a header that includes a set of other headers. +This creates a rule to generate a header that includes a list of other headers. +The generated file will be of the form:: + #include + #include +Args: + hdrs (:obj:`str`): List of files or file labels of headers that the + generated header will include. + strip_prefix (:obj:`list` of :obj:`str`): List of prefixes to strip from + the header names when forming the ``#include`` directives. 
+""" diff --git a/skylark/gz_py.bzl b/skylark/gz_py.bzl new file mode 100644 index 0000000..a40c7b5 --- /dev/null +++ b/skylark/gz_py.bzl @@ -0,0 +1,260 @@ +# -*- python -*- + +load("@gz//bazel/skylark:py.bzl", "py_binary", "py_library", "py_test") + +def gz_py_library( + name, + deps = None, + **kwargs): + """A wrapper to insert Drake-specific customizations.""" + + # Work around https://github.com/bazelbuild/bazel/issues/1567. + deps = (deps or []) + py_library( + name = name, + deps = deps, + srcs_version = "PY3", + **kwargs + ) + +def _disable_test_impl(ctx): + info = dict( + bad_target = ctx.attr.bad_target, + good_target = ctx.attr.good_target, + ) + content = """#!/bin/bash +echo "ERROR: Please use '{good_target}'; the label '{bad_target}'" \ + "has been removed." >&2 +exit 1 +""".format(**info) + ctx.actions.write( + output = ctx.outputs.executable, + content = content, + ) + return [DefaultInfo()] + +# Defines a test which will fail when run via `bazel run` or `bazel test`, +# pointing the user to the correct binary to use. This should typically have +# a "manual" tag. +_disable_test = rule( + attrs = { + "bad_target": attr.string(mandatory = True), + "good_target": attr.string(mandatory = True), + }, + test = True, + implementation = _disable_test_impl, +) + +def _py_target_isolated( + name, + py_target = None, + srcs = None, + main = None, + isolate = True, + visibility = None, + **kwargs): + # See #8041 for more details. + # TODO(eric.cousineau): See if we can remove these shims once we stop + # supporting Python 2 (#10606). + if py_target == None: + fail("Must supply macro function for defining `py_target`.") + + # Do not isolate targets that are already isolated. This generally happens + # when linting tests (which are isolated) are invoked for isolated Python + # targets. Without this check, the actual test turns into + # `py/py/{name}`. 
+ prefix = "py/" + if isolate and not name.startswith(prefix): + actual = prefix + name + + # Preserve original functionality. + if not main: + main = name + ".py" + if not srcs: + srcs = [name + ".py"] + py_target( + name = actual, + srcs = srcs, + main = main, + visibility = visibility, + **kwargs + ) + + # Disable and redirect original name. + package_prefix = "//" + native.package_name() + ":" + + # N.B. We make the disabled rule a test, even if the original was not. + # This ensures that developers will see the redirect using both + # `bazel run` or `bazel test`. + _disable_test( + name = name, + good_target = package_prefix + actual, + bad_target = package_prefix + name, + tags = ["manual"], + visibility = visibility, + ) + else: + py_target( + name = name, + srcs = srcs, + main = main, + visibility = visibility, + **kwargs + ) + +def gz_py_binary( + name, + srcs = None, + main = None, + data = [], + deps = None, + isolate = False, + tags = [], + add_test_rule = 0, + test_rule_args = [], + test_rule_data = [], + test_rule_tags = None, + test_rule_size = None, + test_rule_timeout = None, + test_rule_flaky = 0, + **kwargs): + """A wrapper to insert Drake-specific customizations. + + @param isolate (optional, default is False) + If True, the binary will be placed in a folder isolated from the + library code. This prevents submodules from leaking in as top-level + submodules. For more detail, see #8041. + """ + + # Work around https://github.com/bazelbuild/bazel/issues/1567. 
+ deps = deps or [] + if main == None and len(srcs) == 1: + main = srcs[0] + _py_target_isolated( + name = name, + py_target = py_binary, + isolate = isolate, + srcs = srcs, + main = main, + data = data, + deps = deps, + tags = tags, + python_version = "PY3", + srcs_version = "PY3", + **kwargs + ) + if add_test_rule: + gz_py_test( + name = name + "_test", + srcs = srcs, + main = main, + deps = deps, + isolate = isolate, + args = test_rule_args, + data = data + test_rule_data, + size = test_rule_size, + timeout = test_rule_timeout, + flaky = test_rule_flaky, + tags = (test_rule_tags or []) + ["nolint"], + # N.B. Same as the warning in `drake_pybind_cc_googletest`: numpy + # imports unittest unconditionally. + allow_import_unittest = True, + **kwargs + ) + +def gz_py_unittest( + name, + **kwargs): + """Declares a `unittest`-based python test. + + This macro should be preferred instead of the basic drake_py_test for tests + that use the `unittest` framework. Tests that use this macro should *not* + contain a __main__ handler nor a shebang line. By default, sets test size + to "small" to indicate a unit test. + """ + helper = "//common/test_utilities:gz_py_unittest_main.py" + if kwargs.pop("srcs", None): + fail("Changing srcs= is not allowed by gz_py_unittest." + + " Use gz_py_test instead, if you need something weird.") + srcs = ["test/%s.py" % name, helper] + gz_py_test( + name = name, + srcs = srcs, + main = helper, + allow_import_unittest = True, + _gz_py_unittest_shard_count = kwargs.pop("shard_count", None), + deps = kwargs.pop("deps", []) + [ + "@xmlrunner_py", + ], + **kwargs + ) + +def gz_py_test( + name, + size = None, + srcs = None, + deps = None, + isolate = True, + allow_import_unittest = False, + tags = [], + **kwargs): + """A wrapper to insert Drake-specific customizations. + + @param isolate (optional, default is True) + If True, the test binary will be placed in a folder isolated from the + library code. 
This prevents submodules from leaking in as top-level + submodules. For more detail, see #8041. + + @param allow_import_unittest (optional, default is False) + If False, this test (and anything it imports) is prevented from doing + `import unittest`. This is a guard against writing `unittest`-based + cases that accidentally never get run. In general, `unittest`-based + tests should use the `drake_py_unittest` macro instead of this one + (thus disabling this interlock), but can override this parameter in + case something unique is happening and the other macro can't be used. + + By default, sets test size to "small" to indicate a unit test. Adds the tag + "py" if not already present. + + This macro does not allow a shard_count; use drake_py_unittest for that. + """ + if size == None: + size = "small" + if srcs == None: + srcs = ["test/%s.py" % name] + if kwargs.get("shard_count") != None: + fail("Only gz_py_unittest can use sharding") + shard_count = kwargs.pop("_gz_py_unittest_shard_count", None) + + # Work around https://github.com/bazelbuild/bazel/issues/1567. + deps = deps or [] + if not allow_import_unittest: + deps = deps + ["//common/test_utilities:disable_python_unittest"] + if "py" not in tags: + tags = tags + ["py"] + _py_target_isolated( + name = name, + py_target = py_test, + isolate = isolate, + size = size, + shard_count = shard_count, + srcs = srcs, + deps = deps, + tags = tags, + python_version = "PY3", + srcs_version = "PY3", + **kwargs + ) + +def py_test_isolated( + name, + **kwargs): + """Provides a directory-isolated Python test, robust against shadowing + (#8041). 
+ """ + _py_target_isolated( + name = name, + py_target = py_test, + isolate = True, + **kwargs + ) diff --git a/skylark/pathutils.bzl b/skylark/pathutils.bzl new file mode 100644 index 0000000..97b75e5 --- /dev/null +++ b/skylark/pathutils.bzl @@ -0,0 +1,203 @@ +def _match_single_glob_tokens(path_tokens, prefix_tokens): + """If the prefix matches the path (anchored at the start), returns the + segment of the path tokens that matched -- or None if no match. The + arguments are lists of strings, with an implied "/" between elements. + + The token "*" must match exactly one path token. + The token "**" is not allowed. + """ + if len(prefix_tokens) > len(path_tokens): + # The prefix is too long ==> no match. + return None + + # Check the tokens pairwise (stopping at the shorter of the two lists). + for prefix, path in zip(prefix_tokens, path_tokens): + if prefix == "*": + # The "*" matches anything. + continue + if prefix != path: + # Mismatch. + return None + + # Successful match. + return path_tokens[:len(prefix_tokens)] + +def _match_double_glob_tokens(path_tokens, prefix_tokens): + """If the prefix matches the path (anchored at the start), returns the + segment of the path tokens that matched -- or None if no match. The + arguments are lists of strings, with an implied "/" between elements. + + The token "*" must match exactly one path token. + The token "**" match any number of path tokens, greedily. + """ + + # Expand the double ("**") globs into a list of brute-force candidates, + # i.e., ["**"] ==> [], ["*"], ["*", "*"], ["*", "*", "*"], etc. up to the + # most that we could need. To produce greedy matching, we rank them from + # longest to shorest, and expand the earlier globs first. Each candidate + # is a prefix token list, with either a literal str or a "*" in each item. + candidates = [[]] + for prefix_token in prefix_tokens: + # For a literal token or a "*", append it to every candidate. 
+ if prefix_token != "**": + for i in range(len(candidates)): + candidates[i].append(prefix_token) + continue + + # For a ** token, replicate the candidates for possible ** matches. + # The longest ** match should be as long as the whole path. + expansions = [ + ["*"] * i + for i in reversed(range(len(path_tokens) + 1)) + ] + new_candidates = [ + candidate + expansion + for candidate in candidates + for expansion in expansions + ] + candidates = new_candidates + + # Check each candidate prefix token list for a match against the path. The + # first candidate that matches, wins. + for candidate_tokens in candidates: + match = _match_single_glob_tokens(path_tokens, candidate_tokens) + if match != None: + return match + + # Nothing matched. + return None + +def _remove_prefix(path, prefix): + """Remove prefix from path. + + This attempts to remove the specified prefix from the specified path. The + prefix may contain the globs ``*`` or ``**``, which match one or many + path components, respectively. Matching is greedy. Globs may only be + matched against complete path components (e.g. ``a/*/`` is okay, but + ``a*/`` is not treated as a glob and will be matched literally). + + Args: + path (:obj:`str`) The path to modify. + prefix (:obj:`str`) The prefix to remove. + + Returns: + :obj:`str`: The path with the prefix removed if successful, or None if + the prefix does not match the path. + """ + path_tokens = path.split("/") + prefix_tokens = prefix.split("/") + + # Ignore trailing empty element (happens if prefix string ends with "/"). + if len(prefix_tokens[-1]) == 0: + prefix_tokens = prefix_tokens[:-1] + + # Match the prefix against the path, leaving the final path name intact. 
+ match = _match_double_glob_tokens(path_tokens[:-1], prefix_tokens) + + # If a match was found, return the stripped path, else None + if match == None: + return None + return "/".join(path_tokens[len(match):]) + +def basename(path): + """Return the file name portion of a file path.""" + return path.split("/")[-1] + +def dirname(path): + """Return the directory portion of a file path.""" + if path == "/": + return "/" + + parts = path.split("/") + + if len(parts) > 1: + return "/".join(parts[:-1]) + + return "." + +def join_paths(*args): + """Join paths without duplicating separators. + + This is roughly equivalent to Python's `os.path.join`. + + Args: + *args (:obj:`list` of :obj:`str`): Path components to be joined. + + Returns: + :obj:`str`: The concatenation of the input path components. + """ + result = "" + + for part in args: + if part.endswith("/"): + part = part[-1] + + if part == "" or part == ".": + continue + + result += part + "/" + + return result[:-1] + +def output_path(ctx, input_file, strip_prefix, package_root = None): + """Compute "output path". + + This computes the adjusted output path for an input file. Specifically, it + a) determines the path relative to the invoking context (which is usually, + but not always, the same as the path as specified by the user when the file + was mentioned in a rule), without Bazel's various possible extras, and b) + optionally removes prefixes from this path. When removing prefixes, the + first matching prefix is removed. + + This is used primarily to compute the output install path, without the + leading install prefix, for install actions. + + For example:: + + install_files( + dest = "docs", + files = ["foo/bar.txt"], + strip_prefix = ["foo/"], + ...) + + The :obj:`File`'s path components will have various Bazel bits added. Our + first step is to recover the input path, ``foo/bar.txt``. 
Then we remove + the prefix ``foo``, giving a path of ``bar.txt``, which will become + ``docs/bar.txt`` when the install destination is added. + + The input file must belong to the current package; otherwise, ``None`` is + returned. + + Args: + input_file (:obj:`File`): Artifact to be installed. + strip_prefix (:obj:`list` of :obj:`str`): List of prefixes to strip + from the input path before prepending the destination. + + Returns: + :obj:`str`: The install destination path for the file. + """ + + if package_root == None: + # Determine base path of invoking context. + package_root = join_paths(ctx.label.workspace_root, ctx.label.package) + + # Determine effective path by removing path of invoking context and any + # Bazel output-files path. + input_path = input_file.path + if input_file.is_source: + input_path = _remove_prefix(input_path, package_root) + else: + out_root = join_paths("bazel-out/*/*", package_root) + input_path = _remove_prefix(input_path, out_root) + + # Deal with possible case of file outside the package root. + if input_path == None: + return None + + # Possibly remove prefixes. 
+ for p in strip_prefix: + output_path = _remove_prefix(input_path, p) + if output_path != None: + return output_path + + return input_path diff --git a/skylark/protobuf.bzl b/skylark/protobuf.bzl new file mode 100644 index 0000000..ac7bb01 --- /dev/null +++ b/skylark/protobuf.bzl @@ -0,0 +1,235 @@ +"""Utility functions for generating protobuf code.""" + +load("@rules_proto//proto:defs.bzl", "ProtoInfo") + +_PROTO_EXTENSION = ".proto" +_VIRTUAL_IMPORTS = "/_virtual_imports/" + +def well_known_proto_libs(): + return [ + "@com_google_protobuf//:any_proto", + "@com_google_protobuf//:api_proto", + "@com_google_protobuf//:compiler_plugin_proto", + "@com_google_protobuf//:descriptor_proto", + "@com_google_protobuf//:duration_proto", + "@com_google_protobuf//:empty_proto", + "@com_google_protobuf//:field_mask_proto", + "@com_google_protobuf//:source_context_proto", + "@com_google_protobuf//:struct_proto", + "@com_google_protobuf//:timestamp_proto", + "@com_google_protobuf//:type_proto", + "@com_google_protobuf//:wrappers_proto", + ] + +def get_proto_root(workspace_root): + """Gets the root protobuf directory. + Args: + workspace_root: context.label.workspace_root + Returns: + The directory relative to which generated include paths should be. + """ + if workspace_root: + return "/{}".format(workspace_root) + else: + return "" + +def _strip_proto_extension(proto_filename): + if not proto_filename.endswith(_PROTO_EXTENSION): + fail('"{}" does not end with "{}"'.format( + proto_filename, + _PROTO_EXTENSION, + )) + return proto_filename[:-len(_PROTO_EXTENSION)] + +def proto_path_to_generated_filename(proto_path, fmt_str): + """Calculates the name of a generated file for a protobuf path. + For example, "examples/protos/helloworld.proto" might map to + "helloworld.pb.h". + Args: + proto_path: The path to the .proto file. + fmt_str: A format string used to calculate the generated filename. For + example, "{}.pb.h" might be used to calculate a C++ header filename. 
+ Returns: + The generated filename. + """ + return fmt_str.format(_strip_proto_extension(proto_path)) + +def get_include_directory(source_file): + """Returns the include directory path for the source_file. I.e. all of the + include statements within the given source_file are calculated relative to + the directory returned by this method. + The returned directory path can be used as the "--proto_path=" argument + value. + Args: + source_file: A proto file. + Returns: + The include directory path for the source_file. + """ + directory = source_file.path + prefix_len = 0 + + if is_in_virtual_imports(source_file): + root, relative = source_file.path.split(_VIRTUAL_IMPORTS, 2) + result = root + _VIRTUAL_IMPORTS + relative.split("/", 1)[0] + return result + + if not source_file.is_source and directory.startswith(source_file.root.path): + prefix_len = len(source_file.root.path) + 1 + + if directory.startswith("external", prefix_len): + external_separator = directory.find("/", prefix_len) + repository_separator = directory.find("/", external_separator + 1) + return directory[:repository_separator] + else: + return source_file.root.path if source_file.root.path else "." + +def get_plugin_args( + plugin, + flags, + dir_out, + generate_mocks, + plugin_name = "PLUGIN"): + """Returns arguments configuring protoc to use a plugin for a language. + Args: + plugin: An executable file to run as the protoc plugin. + flags: The plugin flags to be passed to protoc. + dir_out: The output directory for the plugin. + generate_mocks: A bool indicating whether to generate mocks. + plugin_name: A name of the plugin, it is required to be unique when there + are more than one plugin used in a single protoc command. + Returns: + A list of protoc arguments configuring the plugin. 
+ """ + augmented_flags = list(flags) + if generate_mocks: + augmented_flags.append("generate_mock_code=true") + + augmented_dir_out = dir_out + if augmented_flags: + augmented_dir_out = ",".join(augmented_flags) + ":" + dir_out + + return [ + "--plugin=protoc-gen-{plugin_name}={plugin_path}".format( + plugin_name = plugin_name, + plugin_path = plugin.path, + ), + "--{plugin_name}_out={dir_out}".format( + plugin_name = plugin_name, + dir_out = augmented_dir_out, + ), + ] + +def _get_staged_proto_file(context, source_file): + if source_file.dirname == context.label.package or \ + is_in_virtual_imports(source_file): + # Current target and source_file are in same package + return source_file + else: + # Current target and source_file are in different packages (most + # probably even in different repositories) + copied_proto = context.actions.declare_file(source_file.basename) + context.actions.run_shell( + inputs = [source_file], + outputs = [copied_proto], + command = "cp {} {}".format(source_file.path, copied_proto.path), + mnemonic = "CopySourceProto", + ) + return copied_proto + +def protos_from_context(context): + """Copies proto files to the appropriate location. + Args: + context: The ctx object for the rule. + Returns: + A list of the protos. 
+ """ + protos = [] + for src in context.attr.deps: + for file in src[ProtoInfo].direct_sources: + protos.append(_get_staged_proto_file(context, file)) + return protos + +def includes_from_deps(deps): + """Get includes from rule dependencies.""" + return [ + file + for src in deps + for file in src[ProtoInfo].transitive_imports.to_list() + ] + +def get_proto_arguments(protos, genfiles_dir_path): + """Get the protoc arguments specifying which protos to compile.""" + arguments = [] + for proto in protos: + strip_prefix_len = 0 + if is_in_virtual_imports(proto): + incl_directory = get_include_directory(proto) + if proto.path.startswith(incl_directory): + strip_prefix_len = len(incl_directory) + 1 + elif proto.path.startswith(genfiles_dir_path): + strip_prefix_len = len(genfiles_dir_path) + 1 + + arguments.append(proto.path[strip_prefix_len:]) + + return arguments + +def declare_out_files(protos, context, generated_file_format): + """Declares and returns the files to be generated.""" + + out_file_paths = [] + for proto in protos: + if not is_in_virtual_imports(proto): + out_file_paths.append(proto.basename) + else: + path = proto.path[proto.path.index(_VIRTUAL_IMPORTS) + 1:] + out_file_paths.append(path) + + return [ + context.actions.declare_file( + proto_path_to_generated_filename( + out_file_path, + generated_file_format, + ), + ) + for out_file_path in out_file_paths + ] + +def get_out_dir(protos, context): + """ Returns the calculated value for --_out= protoc argument based on + the input source proto files and current context. + Args: + protos: A list of protos to be used as source files in protoc command + context: A ctx object for the rule. + Returns: + The value of --_out= argument. 
+ """ + at_least_one_virtual = 0 + for proto in protos: + if is_in_virtual_imports(proto): + at_least_one_virtual = True + elif at_least_one_virtual: + fail("Proto sources must be either all virtual imports or all real") + if at_least_one_virtual: + out_dir = get_include_directory(protos[0]) + ws_root = protos[0].owner.workspace_root + if ws_root and out_dir.find(ws_root) >= 0: + out_dir = "".join(out_dir.rsplit(ws_root, 1)) + return struct( + path = out_dir, + import_path = out_dir[out_dir.find(_VIRTUAL_IMPORTS) + 1:], + ) + return struct(path = context.genfiles_dir.path, import_path = None) + +def is_in_virtual_imports(source_file, virtual_folder = _VIRTUAL_IMPORTS): + """Determines if source_file is virtual (is placed in _virtual_imports + subdirectory). The output of all proto_library targets which use + import_prefix and/or strip_import_prefix arguments is placed under + _virtual_imports directory. + Args: + source_file: A proto file. + virtual_folder: The virtual folder name (is set to "_virtual_imports" + by default) + Returns: + True if source_file is located under _virtual_imports, False otherwise. + """ + return not source_file.is_source and virtual_folder in source_file.path diff --git a/skylark/py.bzl b/skylark/py.bzl new file mode 100644 index 0000000..0df5415 --- /dev/null +++ b/skylark/py.bzl @@ -0,0 +1,14 @@ +# -*- python -*- + +load( + "@rules_python//python:defs.bzl", + _py_binary = "py_binary", + _py_library = "py_library", + _py_test = "py_test", +) + +py_binary = _py_binary + +py_library = _py_library + +py_test = _py_test diff --git a/skylark/python_env.bzl b/skylark/python_env.bzl new file mode 100644 index 0000000..e39b9e4 --- /dev/null +++ b/skylark/python_env.bzl @@ -0,0 +1,22 @@ +# -*- python -*- + +def hermetic_python_env(): + # In general, we do not want to use Python's "user site-packages" + # (e.g., $HOME/.local) directory because it's not hermetic. Thus, + # we set PYTHONNOUSERSITE to disable the user site-packages. 
+ # + # However, our macOS setup instructions provide for some dependencies + # (e.g., PyYAML) to come from pip, and in some reasonable configurations it + # could be done via `pip install --user` and so be part of $HOME. Thus, in + # order to support that configuration, we only set PYTHONNOUSERSITE under + # linux. We can revisit this decision if we changes how python packages on + # macOS are brought into the workspace. + # + # If https://github.com/bazelbuild/bazel/issues/4939 gets fixed, we can + # revisit whether manually specifying a hermetic env is still necessary. + return select({ + "@gz//bazel/skylark:linux": { + "PYTHONNOUSERSITE": "1", + }, + "//conditions:default": {}, + }) diff --git a/ubuntu-jammy.bazelrc b/ubuntu-jammy.bazelrc new file mode 100644 index 0000000..c9522bc --- /dev/null +++ b/ubuntu-jammy.bazelrc @@ -0,0 +1,2 @@ +build --fission=dbg +build --features=per_object_debug_info diff --git a/workspace/BUILD.bazel b/workspace/BUILD.bazel new file mode 100644 index 0000000..1f52546 --- /dev/null +++ b/workspace/BUILD.bazel @@ -0,0 +1,3 @@ +load("@gz//bazel/lint:lint.bzl", "add_lint_tests") + +add_lint_tests() diff --git a/workspace/bazel_skylib/BUILD.bazel b/workspace/bazel_skylib/BUILD.bazel new file mode 100644 index 0000000..1f52546 --- /dev/null +++ b/workspace/bazel_skylib/BUILD.bazel @@ -0,0 +1,3 @@ +load("@gz//bazel/lint:lint.bzl", "add_lint_tests") + +add_lint_tests() diff --git a/workspace/bazel_skylib/repository.bzl b/workspace/bazel_skylib/repository.bzl new file mode 100644 index 0000000..1755047 --- /dev/null +++ b/workspace/bazel_skylib/repository.bzl @@ -0,0 +1,12 @@ +# -*- python -*- + +load("@gz//bazel/workspace:github.bzl", "github_archive") + +def bazel_skylib_repository(name, mirrors = None): + github_archive( + name = name, + repository = "bazelbuild/bazel-skylib", + commit = "1.2.1", + sha256 = "710c2ca4b4d46250cdce2bf8f5aa76ea1f0cba514ab368f2988f70e864cfaf51", # noqa + mirrors = mirrors, + ) diff --git 
a/workspace/buildifier/BUILD.bazel b/workspace/buildifier/BUILD.bazel new file mode 100644 index 0000000..1f52546 --- /dev/null +++ b/workspace/buildifier/BUILD.bazel @@ -0,0 +1,3 @@ +load("@gz//bazel/lint:lint.bzl", "add_lint_tests") + +add_lint_tests() diff --git a/workspace/buildifier/package.BUILD.bazel b/workspace/buildifier/package.BUILD.bazel new file mode 100644 index 0000000..af0d071 --- /dev/null +++ b/workspace/buildifier/package.BUILD.bazel @@ -0,0 +1,7 @@ +# -*- mode: python -*- + +licenses(["notice"]) # Apache-2.0 + +exports_files( + ["buildifier"], +) diff --git a/workspace/buildifier/repository.bzl b/workspace/buildifier/repository.bzl new file mode 100644 index 0000000..5e512e4 --- /dev/null +++ b/workspace/buildifier/repository.bzl @@ -0,0 +1,69 @@ +# -*- mode: python -*- +# vi: set ft=python : + +""" +Downloads a precompiled version of buildifier and makes it available to the +WORKSPACE. + +Example: + WORKSPACE: + load("@gz//bazel/workspace:mirrors.bzl", "DEFAULT_MIRRORS") + load("@gz//bazel/workspace/buildifier:repository.bzl", "buildifier_repository") # noqa + buildifier_repository(name = "foo", mirrors = DEFAULT_MIRRORS) + + BUILD: + sh_binary( + name = "foobar", + srcs = ["bar.sh"], + data = ["@foo//:buildifier"], + ) + +Argument: + name: A unique name for this rule. +""" + +load("@gz//bazel/workspace:os.bzl", "determine_os") + +def _impl(repository_ctx): + # Enumerate the possible binaries. Note that the buildifier binaries are + # fully statically linked, so the particular distribution doesn't matter, + # only the kernel. 
+ version = "5.1.0" + darwin_urls = [ + x.format(version = version, filename = "buildifier-darwin-amd64") + for x in repository_ctx.attr.mirrors.get("buildifier") + ] + darwin_sha256 = "c9378d9f4293fc38ec54a08fbc74e7a9d28914dae6891334401e59f38f6e65dc" # noqa + linux_urls = [ + x.format(version = version, filename = "buildifier-linux-amd64") + for x in repository_ctx.attr.mirrors.get("buildifier") + ] + linux_sha256 = "52bf6b102cb4f88464e197caac06d69793fa2b05f5ad50a7e7bf6fbd656648a3" # noqa + + # Choose which binary to use. + os_result = determine_os(repository_ctx) + if os_result.is_macos: + urls = darwin_urls + sha256 = darwin_sha256 + elif os_result.is_ubuntu or os_result.is_manylinux: + urls = linux_urls + sha256 = linux_sha256 + else: + fail("Operating system is NOT supported {}".format(os_result)) + + # Fetch the binary from mirrors. + output = repository_ctx.path("buildifier") + repository_ctx.download(urls, output, sha256, executable = True) + + # Add the BUILD file. + repository_ctx.symlink( + Label("@gz//bazel/workspace/buildifier:package.BUILD.bazel"), + "BUILD.bazel", + ) + +buildifier_repository = repository_rule( + attrs = { + "mirrors": attr.string_list_dict(), + }, + implementation = _impl, +) diff --git a/workspace/default.bzl b/workspace/default.bzl new file mode 100644 index 0000000..7af468d --- /dev/null +++ b/workspace/default.bzl @@ -0,0 +1,63 @@ +# -*- python -*- + +load("@gz//bazel/workspace:mirrors.bzl", "DEFAULT_MIRRORS") +load("@gz//bazel/workspace:os.bzl", "os_repository") +load("@gz//bazel/workspace/bazel_skylib:repository.bzl", "bazel_skylib_repository") # noqa +load("@gz//bazel/workspace/buildifier:repository.bzl", "buildifier_repository") # noqa +load("@gz//bazel/workspace/pycodestyle:repository.bzl", "pycodestyle_repository") # noqa +load("@gz//bazel/workspace/rules_python:repository.bzl", "rules_python_repository") # noqa + +def add_default_repositories(excludes = [], mirrors = DEFAULT_MIRRORS): + """Declares workspace repositories 
for all externals needed by drake (other + than those built into Bazel, of course). This is intended to be loaded and + called from a WORKSPACE file. + Args: + excludes: list of string names of repositories to exclude; this can + be useful if a WORKSPACE file has already supplied its own external + of a given name. + """ + if "bazel_skylib" not in excludes: + bazel_skylib_repository(name = "bazel_skylib", mirrors = mirrors) + if "buildifier" not in excludes: + buildifier_repository(name = "buildifier", mirrors = mirrors) + if "pycodestyle" not in excludes: + pycodestyle_repository(name = "pycodestyle", mirrors = mirrors) + if "rules_python" not in excludes: + rules_python_repository(name = "rules_python", mirrors = mirrors) + +def add_default_toolchains(excludes = []): + """Register toolchains for each language (e.g., "py") not explicitly + excluded and/or not using an automatically generated toolchain. + Args: + excludes: List of languages for which a toolchain should not be + registered. + """ + + if "py" not in excludes: + # The Python debug toolchain on Linux is not loaded automatically, but + # may be used by specifying the command line option + # --extra_toolchains=//tools/py_toolchain:linux_dbg_toolchain + native.register_toolchains( + "@gz//bazel/py_toolchain:linux_toolchain", + ) + +def add_default_workspace( + repository_excludes = [], + toolchain_excludes = [], + mirrors = DEFAULT_MIRRORS): + """Declare repositories in this WORKSPACE for each dependency of @drake + (e.g., "eigen") that is not explicitly excluded, and register toolchains + for each language (e.g., "py") not explicitly excluded and/or not using an + automatically generated toolchain. + Args: + repository_excludes: List of repositories that should not be declared + in this WORKSPACE. + toolchain_excludes: List of languages for which a toolchain should not + be registered. + mirrors: Dictionary of mirrors from which to download repository files. 
+ See mirrors.bzl file in this directory for the file format and + default values. + """ + + add_default_repositories(excludes = repository_excludes, mirrors = mirrors) + add_default_toolchains(excludes = toolchain_excludes) diff --git a/workspace/execute.bzl b/workspace/execute.bzl new file mode 100644 index 0000000..a4dc9d1 --- /dev/null +++ b/workspace/execute.bzl @@ -0,0 +1,77 @@ +# -*- python -*- + +def path(repo_ctx, additional_search_paths = []): + """Return the value of the PATH environment variable that would be used by + the which() command.""" + search_paths = additional_search_paths + + # N.B. Ensure ${PATH} in each platform `tools/*.bazelrc` matches these + # paths. + if repo_ctx.os.name == "mac os x": + arch_result = repo_ctx.execute(["/usr/bin/arch"]) + if arch_result.return_code != 0: + fail("Failure while running /usr/bin/arch") + if arch_result.stdout.strip() == "arm64": + homebrew_bin = "/opt/homebrew/bin" + else: + homebrew_bin = "/usr/local/bin" + search_paths = search_paths + [homebrew_bin] + search_paths = search_paths + ["/usr/bin", "/bin"] + return ":".join(search_paths) + +def which(repo_ctx, program, additional_search_paths = []): + """Return the path of the given program or None if there is no such program + in the PATH as defined by the path() function above. The value of the + user's PATH environment variable is ignored. + """ + exec_result = repo_ctx.execute(["which", program], environment = { + "PATH": path(repo_ctx, additional_search_paths), + }) + if exec_result.return_code != 0: + return None + return repo_ctx.path(exec_result.stdout.strip()) + +def execute_and_return( + repo_ctx, + command, + additional_search_paths = [], + **kwargs): + """Runs the `command` (list) and returns a status value. The return value + is a struct with a field `error` that will be None on success or else a + detailed message on command failure. 
+ """ + if "/" in str(command[0]): + program = command[0] + else: + program = which(repo_ctx, command[0], additional_search_paths) + if not program: + error = "Could not find a program named '{}'".format( + command[0], + ) + return struct(error = error) + exec_result = repo_ctx.execute([program] + command[1:], **kwargs) + if exec_result.return_code == 0: + error = None + else: + error = "Failure running " + ( + " ".join(["'{}'".format(x) for x in command]) + ) + if exec_result.stdout: + error += "\n" + exec_result.stdout + if exec_result.stderr: + error += "\n" + exec_result.stderr + return struct( + error = error, + stdout = exec_result.stdout, + ) + +def execute_or_fail(repo_ctx, command, **kwargs): + """Runs the `command` (list) and immediately fails on any error. + Returns a struct with the stdout value.""" + result = execute_and_return(repo_ctx, command, **kwargs) + if result.error: + fail("Unable to complete setup for @{} repository: {}".format( + repo_ctx.name, + result.error, + )) + return result diff --git a/workspace/generate_file.bzl b/workspace/generate_file.bzl new file mode 100644 index 0000000..72ed93f --- /dev/null +++ b/workspace/generate_file.bzl @@ -0,0 +1,25 @@ +# -*- python -*- + +def _generate_file_impl(ctx): + out = ctx.actions.declare_file(ctx.label.name) + ctx.actions.write(out, ctx.attr.content, ctx.attr.is_executable) + return [DefaultInfo( + files = depset([out]), + data_runfiles = ctx.runfiles(files = [out]), + )] + +generate_file = rule( + attrs = { + "content": attr.string(mandatory = True), + "is_executable": attr.bool(default = False), + }, + output_to_genfiles = True, + implementation = _generate_file_impl, +) + +"""Generate a file with specified content. +This creates a rule to generate a file with specified content (which is either +static or has been previously computed). +Args: + content (:obj:`str`): Desired content of the generated file. 
+""" diff --git a/workspace/github.bzl b/workspace/github.bzl new file mode 100644 index 0000000..07d32f0 --- /dev/null +++ b/workspace/github.bzl @@ -0,0 +1,363 @@ +# -*- python -*- + +load("@bazel_tools//tools/build_defs/repo:utils.bzl", "patch") + +def github_archive( + name, + repository = None, + commit = None, + commit_pin = None, + sha256 = "0" * 64, + build_file = None, + patches = None, + extra_strip_prefix = "", + local_repository_override = None, + mirrors = None, + **kwargs): + """A macro to be called in the WORKSPACE that adds an external from GitHub + using a workspace rule. + + Args: + name: required name is the rule name and so is used for @name//... + labels when referring to this archive from BUILD files. + repository: required GitHub repository name in the form + organization/project. + commit: required commit is the tag name or git commit sha to download. + commit_pin: optional boolean, set to True iff the archive should remain + at the same version indefinitely, eschewing automated upgrades to + newer versions. + sha256: required sha256 is the expected SHA-256 checksum of the + downloaded archive. When unsure, you can omit this argument (or + comment it out) and then the checksum-mismatch error message will + offer a suggestion. + build_file: optional build file is the BUILD file label to use for + building this external. As a Drake-specific abbreviation, when + provided as a relative label (e.g., ":package.BUILD.bazel"), it + will be taken as relative to the "@gz//bazel/workspace/{name}/" + package. When no build_file is provided, the BUILD file(s) within + the archive will be used. + patches: optional list of patches to apply, matching what's described + at https://bazel.build/rules/lib/repo/git#git_repository-patches. + As a Drake-specific abbreviation, when provided using relative + labels (e.g., ":patches/foo.patch"), they will be taken as relative + to the "@gz//bazel/workspace/{name}/" package. 
+ extra_strip_prefix: optional path to strip from the downloaded archive, + e.g., "src" to root the repository at "./src/" instead of "./". + local_repository_override: optional local repository override can be + used for temporary local testing; instead of retrieving the code + from GitHub, the code is retrieved from the local filesystem path + given in the argument. + mirrors: required mirrors is a dict from string to list-of-string with + key "github", where the list-of-strings are URLs to use, formatted + using {repository} and {commit} string substitutions. The + mirrors.bzl file in this directory provides a reasonable default + value. + """ + if repository == None: + fail("Missing repository=") + if commit == None: + fail("Missing commit=") + if mirrors == None: + fail("Missing mirrors=; see mirrors.bzl") + + build_file = _resolve_drake_abbreviation(name, build_file) + patches = [ + _resolve_drake_abbreviation(name, one_patch) + for one_patch in (patches or []) + ] + + if local_repository_override != None: + path = local_repository_override + if extra_strip_prefix: + path += "/" + extra_strip_prefix + if build_file == None: + native.local_repository( + name = name, + path = path, + ) + else: + native.new_local_repository( + name = name, + build_file = build_file, + path = path, + ) + return + + # Once we've handled the "local_repository_override" sidestep, we delegate + # to a rule (not a macro) so that we have more leeway in the actions we can + # take (i.e., so we can do more than just a simple download-and-extract). + _github_archive_real( + name = name, + repository = repository, + commit = commit, + commit_pin = commit_pin, + sha256 = sha256, + build_file = build_file, + patches = patches, + extra_strip_prefix = extra_strip_prefix, + mirrors = mirrors, + **kwargs + ) + +def _resolve_drake_abbreviation(name, label_str): + """De-abbreviates the given label_str as a Gz tools/workspace label. + If the label_str is None, returns None. 
If the label_str is relative, + interprets it relative to the "@drake//tools/workspace/{name}/" package + and returns an absolute label. Otherwise, returns the label_str unchanged. + """ + if label_str == None: + return None + if label_str.startswith(":"): + return "@gz//bazel/workspace/" + name + label_str + return label_str + +# Helper stub to implement a repository_rule in terms of a setup() function. +def _github_archive_real_impl(repository_ctx): + result = setup_github_repository(repository_ctx) + if result.error != None: + fail("Unable to complete setup for " + + "@{} repository: {}".format( + repository_ctx.name, + result.error, + )) + +_github_archive_real = repository_rule( + implementation = _github_archive_real_impl, + attrs = { + "repository": attr.string( + mandatory = True, + ), + "commit": attr.string( + mandatory = True, + ), + "commit_pin": attr.bool(), + "sha256": attr.string( + mandatory = False, + default = "0" * 64, + ), + "build_file": attr.label( + default = None, + ), + "extra_strip_prefix": attr.string( + default = "", + ), + "mirrors": attr.string_list_dict( + mandatory = True, + allow_empty = False, + ), + "patches": attr.label_list( + default = [], + ), + "patch_tool": attr.string( + default = "patch", + ), + "patch_args": attr.string_list( + default = ["-p0"], + ), + "patch_cmds": attr.string_list( + default = [], + ), + }, +) +"""This is a rule() formulation of the github_archive() macro. It is identical +to the macro except that it does not support local_repository_override. +Consult the macro documentation for full API details. +""" + +def setup_github_repository(repository_ctx): + """This is a reusable formulation of the github_archive() macro. It is + identical to the macro except that (1) it does not support local repository + override, and (2) it returns a status struct instead of failing internally. + The result struct has a field `error` that will be non-None iff there were + any errors. 
Consult the macro documentation for additional API details. + """ + + # Do the download step first. (This also writes the metadata.) + github_download_and_extract( + repository_ctx, + repository = repository_ctx.attr.repository, + commit = repository_ctx.attr.commit, + commit_pin = getattr(repository_ctx.attr, "commit_pin", None), + mirrors = repository_ctx.attr.mirrors, + sha256 = repository_ctx.attr.sha256, + extra_strip_prefix = repository_ctx.attr.extra_strip_prefix, + ) + + # Optionally apply source patches, using Bazel's utility helper. Here we + # use getattr as a guard, in case the originating repository_rule didn't + # want to declare attr support for the patchfile-related settings. + patch_triggers = ["patches", "patch_cmds"] + if any([getattr(repository_ctx.attr, a, None) for a in patch_triggers]): + patch(repository_ctx) + + # We re-implement Bazel's workspace_and_buildfile utility, so that options + # we don't care about (e.g., build_file_content) do not have to be declared + # as attrs on all of our own repository rules. + # + # Unlike workspace_and_buildfile, we create WORKSPACE.bazel and BUILD.bazel + # (rather than WORKSPACE and BUILD) because when the "*.bazel" flavor is + # present, it always takes precedence. 
+ files_to_be_created = ["WORKSPACE.bazel"] + if repository_ctx.attr.build_file: + files_to_be_created.append("BUILD.bazel") + for name in files_to_be_created: + if repository_ctx.path(name).exists: + repository_ctx.execute(["/bin/mv", name, name + ".ignored"]) + repository_ctx.file( + "WORKSPACE.bazel", + "workspace(name = \"{name}\")\n".format( + name = repository_ctx.name, + ), + ) + if repository_ctx.attr.build_file: + repository_ctx.symlink(repository_ctx.attr.build_file, "BUILD.bazel") + return struct(error = None) + +def github_download_and_extract( + repository_ctx, + repository, + commit, + mirrors, + output = "", + sha256 = "0" * 64, + extra_strip_prefix = "", + commit_pin = None): + """Download an archive of the provided GitHub repository and commit to the + output path and extract it. + + Args: + repository_ctx: context of a Bazel repository rule. + repository: GitHub repository name in the form organization/project. + commit: git revision for which the archive should be downloaded. + mirrors: dictionary of mirrors, see mirrors.bzl in this directory for + an example. + output: path to the directory where the archive will be unpacked, + relative to the Bazel repository directory. + sha256: expected SHA-256 hash of the archive downloaded. Fallback to + an incorrect default value to prevent the hash check from being + disabled, but allow the first download attempt to fail and print + the correct SHA-256 hash. + extra_strip_prefix: optional path to strip from the downloaded archive, + e.g., "src" to root the repository at "./src/" instead of "./". + commit_pin: set to True iff the archive should remain at the same + version indefinitely, eschewing automated upgrades to newer + versions. 
+ """ + urls = _urls( + repository = repository, + commit = commit, + mirrors = mirrors, + ) + + repository_ctx.download_and_extract( + urls, + output = output, + sha256 = _sha256(sha256), + type = "tar.gz", + stripPrefix = _strip_prefix(repository, commit, extra_strip_prefix), + ) + +def _sha256(sha256): + """Fallback to an incorrect default value of SHA-256 hash to prevent the + hash check from being disabled, but allow the first download attempt of an + archive to fail and print the correct hash. + + Args: + sha256: expected SHA-256 hash of the archive to be downloaded. + """ + if not sha256: + sha256 = "0" * 64 + + return sha256 + +def _strip_prefix(repository, commit, extra_strip_prefix): + """Compute the strip prefix for a downloaded archive of the provided + GitHub repository and commit. + + Args: + repository: GitHub repository name in the form organization/project. + commit: git revision for which the archive was downloaded. + """ + repository_split = repository.split("/") + + if len(repository_split) != 2: + fail("repository must be formatted as organization/project") + + _, project = repository_split + + # GitHub archives omit the "v" in version tags, for some reason. + if commit[0] == "v": + strip_commit = commit[1:] + else: + strip_commit = commit + + result = project + "-" + strip_commit.replace("/", "-") + if extra_strip_prefix: + result += "/" + extra_strip_prefix + return result + +def _is_commit_sha(commit): + """Returns true iff the commit is a hexadecimal string of length 40.""" + return len(commit) == 40 and all([ + ch.isdigit() or (ch >= "a" and ch <= "f") + for ch in commit.elems() + ]) + +def _format_url(*, pattern, repository, commit): + """Given a URL pattern for github.com or a Drake-specific mirror, + substitutes in the given repository and commit (tag or git sha). + + The URL pattern accepts the following substitutions: + + The {repository} is always substituted with `repository`. + The {commit} is always substituted with `commit`. 
+ If `commit` refers to a git tag, then {tag_name} will be substituted. + If `commit` refers to a git branch, then {branch_name} will be substituted. + If `commit` refers to a git sha, then {commit_sha} will be substituted. + + Patterns that contain a substitution which does not apply to the given + `commit` (e.g., {commit_sha} when `commit` is a tag) will return None. + The pattern must contain exactly one of {commit}, {tag_name}, + {branch_name}, or {commit_sha}. + """ + is_commit_sha = _is_commit_sha(commit) + is_tag = not is_commit_sha + substitutions = { + "repository": repository, + "commit": commit, + "tag_name": commit if is_tag else None, + "commit_sha": commit if is_commit_sha else None, + } + for name, value in substitutions.items(): + if value == None: + needle = "{" + name + "}" + if needle in pattern: + # If the pattern uses a substitution that we do not have, + # report that to our caller as "None"; don't return a URL + # string with a literal "None" in it! + return None + return pattern.format(**substitutions) + +def _urls(*, repository, commit, mirrors): + """Compute the urls from which an archive of the provided GitHub + repository and commit may be downloaded. + + Args: + repository: GitHub repository name in the form organization/project. + commit: git revision for which the archive should be downloaded. + mirrors: dictionary of mirrors, see mirrors.bzl in this directory for + an example. + """ + result_with_nulls = [ + _format_url( + pattern = x, + repository = repository, + commit = commit, + ) + for x in mirrors.get("github") + ] + return [ + url + for url in result_with_nulls + if url != None + ] diff --git a/workspace/mirrors.bzl b/workspace/mirrors.bzl new file mode 100644 index 0000000..24dbfc1 --- /dev/null +++ b/workspace/mirrors.bzl @@ -0,0 +1,30 @@ +# -*- mode: python -*- +# vi: set ft=python : + +# This constant contains Drake's default lists of mirrors. 
It is keyed by the +# repository type using magic strings ("github", etc.), and has values of type +# list-of-string; each string is a pattern for a mirror URL. +# +# When calling a Drake workspace rule that requires a mirror= argument, this +# constant is a reasonable default value. +# +# Each repository type has its own keyword string substitutions within its +# pattern string; these will vary from one repository type to another; consult +# the specific rules (e.g., github_archive()) for details. +# +# The first item in each list is the authoritative source (e.g., the upstream +# server), if there is one. +# +# For Drake's defaults, Packages are mirrored from upstream (GitHub, Bitbucket, +# PyPI, etc.) to CloudFront backed by an S3 bucket. +# +DEFAULT_MIRRORS = { + "buildifier": [ + "https://github.com/bazelbuild/buildtools/releases/download/{version}/{filename}", # noqa + ], + "github": [ + # For github.com, we choose a pattern based on the kind of commit. + "https://github.com/{repository}/archive/refs/tags/{tag_name}.tar.gz", # noqa + "https://github.com/{repository}/archive/{commit_sha}.tar.gz", + ], +} diff --git a/workspace/os.bzl b/workspace/os.bzl new file mode 100644 index 0000000..512d50c --- /dev/null +++ b/workspace/os.bzl @@ -0,0 +1,344 @@ +# -*- mode: python -*- +# vi: set ft=python : + +"""A collection of OS-related utilities intended for use in repository rules, +i.e., rules used by WORKSPACE files, not BUILD files. + +To opt-in to the "manylinux" or "macos_wheel" build variants, set the +environment variable (e.g.) `DRAKE_OS=manylinux` before running the build. The +most precise way to do this is to add a `user.bazelrc` file to the root of the +Drake source tree with the following content: + + common --repo_env=DRAKE_OS=manylinux + +Alternatively, you may pass `--repo_env=DRAKE_OS=manylinux` on the bazel +command line. (Replace "manylinux" with "macos_wheel" as appropriate.) 
+""" + +load("@gz//bazel/workspace:execute.bzl", "which") + +def exec_using_which(repository_ctx, command): + """Run the given command (a list), using the which() function in + execute.bzl to locate the executable named by the zeroth index of + `command`. + + Return struct with attributes: + - error (None when success, or else str message) + - stdout (str command output, possibly empty) + """ + + # Find the executable. + fullpath = which(repository_ctx, command[0]) + if fullpath == None: + return struct( + stdout = "", + error = "could not find which '%s'" % command[0], + ) + + # Run the executable. + result = repository_ctx.execute([fullpath] + command[1:]) + if result.return_code != 0: + error = "error %d running %r (command %r, stdout %r, stderr %r)" % ( + result.return_code, + command[0], + command, + result.stdout, + result.stderr, + ) + return struct(stdout = result.stdout, error = error) + + # Success. + return struct(stdout = result.stdout, error = None) + +def _make_result( + error = None, + macos_release = None, + ubuntu_release = None, + is_wheel = False, + homebrew_prefix = None): + """Return a fully-populated struct result for determine_os, below.""" + is_macos = (macos_release != None) and not is_wheel + is_macos_wheel = (macos_release != None) and is_wheel + is_ubuntu = (ubuntu_release != None) and not is_wheel + is_manylinux = (ubuntu_release != None) and is_wheel + if is_macos: + target = "macos" + elif is_macos_wheel: + target = "macos_wheel" + elif is_ubuntu: + target = "ubuntu" + elif is_manylinux: + target = "manylinux" + else: + target = None + return struct( + error = error, + target = target, + is_macos = is_macos, + is_macos_wheel = is_macos_wheel, + is_ubuntu = is_ubuntu, + is_manylinux = is_manylinux, + ubuntu_release = ubuntu_release, + macos_release = macos_release, + homebrew_prefix = homebrew_prefix, + ) + +def _determine_linux(repository_ctx): + """Handle determine_os on Linux.""" + + # Shared error message text across different 
failure cases. + error_prologue = "could not determine Linux distribution: " + + # Allow the user to override the OS selection. + drake_os = repository_ctx.os.environ.get("DRAKE_OS", "") + is_manylinux = False + if len(drake_os) > 0: + if drake_os == "manylinux": + is_manylinux = True + else: + return _make_result(error = "{}{} DRAKE_OS={}".format( + error_prologue, + "unknown value for environment variable", + drake_os, + )) + + # Get distro name. + lsb = exec_using_which(repository_ctx, ["lsb_release", "-si"]) + if lsb.error != None: + return _make_result(error = error_prologue + lsb.error) + distro = lsb.stdout.strip() + + if distro == "Ubuntu": + lsb = exec_using_which(repository_ctx, ["lsb_release", "-sr"]) + if lsb.error != None: + return _make_result(error = error_prologue + lsb.error) + ubuntu_release = lsb.stdout.strip() + + # Match supported Ubuntu release(s). These should match those listed in + # both doc/_pages/from_source.md and the root CMakeLists.txt. + if ubuntu_release in ["20.04", "22.04"]: + return _make_result( + ubuntu_release = ubuntu_release, + is_wheel = is_manylinux, + ) + + # Nothing matched. + return _make_result( + error = (error_prologue + + "unsupported '%s' release '%s'" % + (distro, ubuntu_release)), + ) + + # Nothing matched. + return _make_result( + error = error_prologue + "unsupported distribution '%s'" % distro, + ) + +def _determine_macos(repository_ctx): + """Handle determine_os on macOS.""" + + # Shared error message text across different failure cases. + error_prologue = "could not determine macOS version: " + + # Allow the user to override the OS selection. + drake_os = repository_ctx.os.environ.get("DRAKE_OS", "") + is_macos_wheel = False + if len(drake_os) > 0: + if drake_os == "macos_wheel": + is_macos_wheel = True + else: + return _make_result(error = "{}{} DRAKE_OS={}".format( + error_prologue, + "unknown value for environment variable", + drake_os, + )) + + # Run sw_vers to determine macOS version. 
+ sw_vers = exec_using_which(repository_ctx, [ + "sw_vers", + "-productVersion", + ]) + if sw_vers.error != None: + return _make_result(error = error_prologue + sw_vers.error) + + major_minor_versions = sw_vers.stdout.strip().split(".")[:2] + if int(major_minor_versions[0]) < 11: + macos_release = ".".join(major_minor_versions) + else: + macos_release = major_minor_versions[0] + + # Check which arch we should be using. + arch_result = exec_using_which(repository_ctx, ["/usr/bin/arch"]) + if arch_result.stdout.strip() == "arm64": + homebrew_prefix = "/opt/homebrew" + else: + homebrew_prefix = "/usr/local" + + # Match supported macOS release(s). + if macos_release in ["11", "12"]: + return _make_result( + macos_release = macos_release, + homebrew_prefix = homebrew_prefix, + is_wheel = is_macos_wheel, + ) + + # Nothing matched. + return _make_result( + error = error_prologue + "unsupported macOS '%s'" % macos_release, + ) + +def determine_os(repository_ctx): + """ + A repository_rule helper function that determines which of the supported + build environments (OS versions or wheel environments) we should target. + + We support four options, which are mutually exclusive and collectively + exhaustive: "macos" or "macos_wheel" or "ubuntu" or "manylinux". + + The "manylinux" target indicates this build will be packaged into a Python + wheel that conforms to a "manylinux" standard such as manylinux_2_31; see + https://github.com/pypa/manylinux. Currently we compile this in an Ubuntu + container using only the most basic host packages from Ubuntu (libc, + libstdc++, etc.). In this case, the value of is_ubuntu will be False, but + ubuntu_release will still be provided. + + The "macos_wheel" target indicates this build will be packaged into a + Python wheel. + + In case of an error, the "error" attribute of the struct will be set, and + all of the other fields will be None or False. + + Argument: + repository_ctx: The context passed to the repository_rule calling this. 
+ + Result: + a struct, with attributes: + - error: str iff any error occurred, else None + + - target: str "macos" or "macos_wheel" or "ubuntu" or "manylinux" + - is_macos: True iff targeting a macOS non-wheel build + - is_macos_wheel: True iff targeting a macOS wheel build + - is_ubuntu: True iff targeting an Ubuntu non-wheel build + - is_manylinux: True iff targeting a Linux wheel build + + - ubuntu_release: str like "20.04" or "22.04" (set any time the build + platform is Ubuntu, even for builds targeting "manylinux") + - macos_release: str like "11" or "12" (set any time the build platform + is macOS, even for builds targeting "macos_wheel") + - homebrew_prefix: str "/usr/local" or "/opt/homebrew" (set any time + the build platform is macOS, even for builds targeting + "macos_wheel") + """ + + os_name = repository_ctx.os.name + if os_name == "mac os x": + return _determine_macos(repository_ctx) + elif os_name == "linux": + return _determine_linux(repository_ctx) + else: + return _make_result(error = "unknown or unsupported OS '%s'" % os_name) + +def os_specific_alias(repository_ctx, mapping): + """ + A repository_rule helper function that creates a BUILD file with alias() + declarations based on which supported OS version we are targeting. + + Argument: + repository_ctx: The context passed to the repository_rule calling this. + mapping: dict(str, list(str)) where the keys match the OS, and the list + of values are of the form name=actual as in alias(name, actual). + + The keys of mapping are searched in the following preferential order: + - Exact release, via e.g., "Ubuntu 20.04" or "macOS 11" + - Any release, via "Ubuntu default" or "macOS default" + - Anything else, via "default" + """ + + os_result = determine_os(repository_ctx) + if os_result.error != None: + fail(os_result.error) + + # Find the best match in the mapping dict for our OS. 
+ keys = [] + if os_result.ubuntu_release: + keys = [ + "Ubuntu " + os_result.ubuntu_release, + "Ubuntu default", + "default", + ] + elif os_result.macos_release: + keys = [ + "macOS " + os_result.macos_release, + "macOS default", + "default", + ] + elif os_result.is_manylinux: + keys = [ + "manylinux", + ] + found_items = None + for key in keys: + if key in mapping: + found_items = mapping[key] + break + if not found_items: + fail("Unsupported os_result " + repr(os_result)) + + # Emit the list of aliases. + file_content = """# -*- python -*- + +# DO NOT EDIT: generated by os_specific_alias_repository() + +package(default_visibility = ["//visibility:public"]) +""" + + for item in found_items: + name, actual = item.split("=") + file_content += 'alias(name = "{}", actual = "{}")\n'.format( + name, + actual, + ) + repository_ctx.file( + "BUILD.bazel", + content = file_content, + executable = False, + ) + +def _os_specific_alias_impl(repository_ctx): + os_specific_alias(repository_ctx, repository_ctx.attr.mapping) + +os_specific_alias_repository = repository_rule( + attrs = { + "mapping": attr.string_list_dict(mandatory = True), + }, + implementation = _os_specific_alias_impl, +) + +def _os_impl(repo_ctx): + os_result = determine_os(repo_ctx) + repo_ctx.file("BUILD.bazel", "") + + if os_result.error: + fail(os_result.error) + + constants = """ +TARGET = {target} +UBUNTU_RELEASE = {ubuntu_release} +MACOS_RELEASE = {macos_release} +HOMEBREW_PREFIX = {homebrew_prefix} + """.format( + target = repr(os_result.target), + ubuntu_release = repr(os_result.ubuntu_release), + macos_release = repr(os_result.macos_release), + homebrew_prefix = repr(os_result.homebrew_prefix), + ) + repo_ctx.file("os.bzl", constants) + +os_repository = repository_rule( + implementation = _os_impl, +) + +""" +Provides the fields `TARGET`, `UBUNTU_RELEASE` and `MACOS_RELEASE` from +`determine_os`. 
+""" diff --git a/workspace/pkg_config.BUILD.tpl b/workspace/pkg_config.BUILD.tpl new file mode 100644 index 0000000..03e6140 --- /dev/null +++ b/workspace/pkg_config.BUILD.tpl @@ -0,0 +1,21 @@ +# -*- python -*- + +# %{topcomment} + +licenses(%{licenses}) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = %{name}, + srcs = %{srcs}, + hdrs = %{hdrs}, + copts = %{copts}, + defines = %{defines}, + includes = %{includes}, + linkopts = %{linkopts}, + deps = %{deps}, + deprecation = %{extra_deprecation}, +) + +%{build_epilog} diff --git a/workspace/pkg_config.bzl b/workspace/pkg_config.bzl new file mode 100644 index 0000000..845e3cc --- /dev/null +++ b/workspace/pkg_config.bzl @@ -0,0 +1,388 @@ +# -*- python -*- + +load("@gz//bazel/workspace:execute.bzl", "path", "which") +load("@gz//bazel/workspace:os.bzl", "determine_os") + +_DEFAULT_TEMPLATE = Label("@gz//bazel/workspace:pkg_config.BUILD.tpl") + +_DEFAULT_STATIC = False + +def _run_pkg_config(repository_ctx, command_line, pkg_config_paths): + """Run command_line with PKG_CONFIG_PATH = pkg_config_paths and return its + tokenized output.""" + pkg_config_path = ":".join(pkg_config_paths) + result = repository_ctx.execute( + command_line, + environment = { + "PKG_CONFIG_PATH": pkg_config_path, + }, + ) + if result.return_code != 0: + return struct(error = "error {} from {}: {}{}".format( + result.return_code, + command_line, + result.stdout, + result.stderr, + )) + tokens = [x for x in result.stdout.strip().split(" ") if x] + return struct(tokens = tokens, error = None) + +def setup_pkg_config_repository(repository_ctx): + """This is the macro form of the pkg_config_repository() rule below. + Refer to that rule's API documentation for details. + + This flavor of this rule is intended to be called by other repository_rule + implementation functions. 
The pkg_config_repository flavor of this rule is + intended to be called directly from the WORKSPACE file, or from a macro + that was called by the WORKSPACE file. + """ + + # First locate pkg-config. + tool_path = which(repository_ctx, "pkg-config") + if not tool_path: + return struct(error = "Could not find pkg-config on PATH={}".format( + path(repository_ctx), + )) + args = [tool_path, repository_ctx.attr.modname] + + # Grab any extra paths requested by the user. + pkg_config_paths = list(getattr( + repository_ctx.attr, + "pkg_config_paths", + [], + )) + + os_result = determine_os(repository_ctx) + + if os_result.is_macos or os_result.is_macos_wheel: + # Find the desired homebrew search path, if any. + homebrew_prefix = os_result.homebrew_prefix + homebrew_subdir = getattr( + repository_ctx.attr, + "homebrew_subdir", + "", + ) + if homebrew_prefix and homebrew_subdir: + pkg_config_paths.insert(0, "{}/{}".format( + homebrew_prefix, + homebrew_subdir, + )) + + if os_result.is_manylinux or os_result.is_macos_wheel: + pkg_config_paths.insert(0, "/opt/drake-dependencies/share/pkgconfig") + pkg_config_paths.insert(0, "/opt/drake-dependencies/lib/pkgconfig") + + # Check if we can find the required *.pc file of any version. + result = _run_pkg_config(repository_ctx, args, pkg_config_paths) + if result.error != None: + return result + + # If we have a minimum version, enforce that. + atleast_version = getattr(repository_ctx.attr, "atleast_version", "") + if atleast_version: + result = _run_pkg_config(repository_ctx, args + [ + "--atleast-version", + atleast_version, + ], pkg_config_paths) + if result.error != None: + return struct(error = result.error + "during version check") + + # Determine linkopts. 
+ static = getattr(repository_ctx.attr, "static", _DEFAULT_STATIC) + libs_args = args + ["--libs"] + if static: + libs_args = libs_args + ["--static"] + result = _run_pkg_config(repository_ctx, libs_args, pkg_config_paths) + if result.error != None: + return result + linkopts = result.tokens + + # Bazel "linkopts=" must be either switches ("-foo"), variables ("$(FOO)"), + # or labels ("foo"). We should only get switches from `pkg-config --libs`. + # However, sometimes it produces "-framework CoreFoundation" or similar, + # which is *supposed* to be a single switch, but our split heuristic + # chopped it up. We recombine non-switch args with their preceding arg as + # a repair. We process args in reverse order to keep our loop index + # unchanged by a pop. + for i in reversed(range(len(linkopts))): + linkopt = linkopts[i] + + # Absolute system paths to *.dylib and *.so files get turned into -l + # instead. + if linkopt.endswith(".dylib") or linkopt.endswith(".so"): + if linkopt.endswith(".dylib"): + possible_libdirs = [ + "/usr/lib", + "/usr/local/lib", + ] + suffix = ".dylib" + elif linkopt.endswith(".so"): + possible_libdirs = [ + "/usr/lib", + "/usr/lib/x86_64-linux-gnu", + ] + suffix = ".so" + else: + return struct(error = ("expected linkopt {} to end with " + + ".dylib or .so").format(linkopt)) + for dir in possible_libdirs: + prefix = dir + "/lib" + if linkopt.startswith(prefix): + name = linkopt[len(prefix):-len(suffix)] + if "/" not in name: + linkopt = "-l" + name + linkopts[i] = linkopt + break + + # Add `-Wl,-rpath,` for `-L`. + # See https://github.com/RobotLocomotion/drake/issues/7387#issuecomment-359952616 # noqa + if linkopt.startswith("-L"): + linkopts.insert(i, "-Wl,-rpath," + linkopt[2:]) + continue + + # Switches stay put. + if linkopt.startswith("-"): + continue + + # A non-switch arg should be recombined with the preceding arg. 
+ non_switch_arg = linkopts.pop(i) + if i == 0: + return struct(error = "malformed linkopts: " + repr(linkopts)) + linkopts[i - 1] += " " + non_switch_arg + + # Determine cflags; we'll split into includes and defines in a moment. + result = _run_pkg_config( + repository_ctx, + args + ["--cflags"], + pkg_config_paths, + ) + if result.error != None: + return result + cflags = result.tokens + + # Split cflags into includes and defines. The -I paths from pkg-config + # will be absolute paths; we'll make them relative in a moment. + absolute_includes = [] + defines = [] + unknown_cflags = [] + + # Blacklist various system include paths on macOS. + blacklisted_includes = [ + "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/include", # noqa + "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk/usr/include", # noqa + "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX12.0.sdk/usr/include", # noqa + "/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include", # noqa + "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include", + "/Library/Developer/CommandLineTools/SDKs/MacOSX11.0.sdk/usr/include", + "/Library/Developer/CommandLineTools/SDKs/MacOSX12.0.sdk/usr/include", + "/Library/Developer/CommandLineTools/usr/include", + ] + + # Sort each cflag into includes, defines, or unknowns. + for cflag in cflags: + if cflag.startswith("-I"): + value = cflag[2:] + if value in blacklisted_includes: + continue + if value not in absolute_includes: + absolute_includes.append(value) + elif cflag.startswith("-D"): + value = cflag[2:] + if value not in defines: + defines.append(value) + elif cflag == "-pthread": + # The pkg-config output has told us to use -pthread when compiling. 
+ # When compiling the typical effect of -pthread is to -D_REENTRANT; + # when linking the typical effect of -pthread is to -lpthread. In + # Bazel, we can't pass -pthread in a cc_library's defines (it's not + # a preprocessor definition), and we shouldn't pass -pthread only + # in a cc_library's copts (i.e., non-transitively), since + # respecting transitivity might be important for some toolchains. + # Instead, when compiling our code that uses this library, we'll + # decide to just ignore pkg-config's advice to use -pthread when + # compiling and instead apply -pthread only when linking. + if "-pthread" not in linkopts: + linkopts.append("-pthread") + elif cflag in [ + "-frounding-math", + "-ffloat-store", + "-msse", + "-msse2", + "-msse3", + "-msse4", + "-msse4.1", + "-msse4.2", + "-mfpmath", + ]: + # We know these are okay to ignore. + pass + else: + unknown_cflags += [cflag] + if unknown_cflags: + print("pkg-config of {} returned flags that we will ignore: {}".format( + repository_ctx.attr.modname, + unknown_cflags, + )) + + # Symlink the absolute include paths into our repository, to obtain + # relative paths for them as required by cc_library's attributes. + includes = [] + hdrs_path = repository_ctx.path("include") + for item in absolute_includes: + if item == "/usr/include" or item == "/usr/local/include": + print(("pkg-config of {} returned an include path that " + + "contains {} that may contain unrelated headers").format( + repository_ctx.attr.modname, + item, + )) + symlink_dest = item.replace("/", "_") + repository_ctx.symlink( + repository_ctx.path(item), + hdrs_path.get_child(symlink_dest), + ) + includes += ["include/" + symlink_dest] + hdrs_prologue = "glob([\"include/**\"]) + " + + extra_deprecation = getattr( + repository_ctx.attr, + "extra_deprecation", + "", + ) + if extra_deprecation == "": + extra_deprecation = None + + # Write out the BUILD.bazel file. 
+ substitutions = { + "%{topcomment}": "DO NOT EDIT: generated by pkg_config_repository()", + "%{licenses}": repr( + getattr(repository_ctx.attr, "licenses", []), + ), + "%{name}": repr( + repository_ctx.name, + ), + "%{srcs}": repr( + getattr(repository_ctx.attr, "extra_srcs", []), + ), + "%{hdrs}": ( + hdrs_prologue + repr( + getattr(repository_ctx.attr, "extra_hdrs", []), + ) + ), + "%{copts}": repr( + getattr(repository_ctx.attr, "extra_copts", []), + ), + "%{defines}": repr( + defines + getattr(repository_ctx.attr, "extra_defines", []), + ), + "%{includes}": repr( + includes + getattr(repository_ctx.attr, "extra_includes", []), + ), + "%{linkopts}": repr( + linkopts + getattr(repository_ctx.attr, "extra_linkopts", []), + ), + "%{deps}": repr( + getattr(repository_ctx.attr, "extra_deps", []), + ), + "%{build_epilog}": getattr(repository_ctx.attr, "build_epilog", ""), + "%{extra_deprecation}": repr(extra_deprecation), + } + template = getattr( + repository_ctx.attr, + "build_file_template", + _DEFAULT_TEMPLATE, + ) + repository_ctx.template("BUILD.bazel", template, substitutions) + + return struct(value = True, error = None) + +def _impl(repository_ctx): + result = setup_pkg_config_repository(repository_ctx) + if result.error != None: + fail("Unable to complete pkg-config setup for " + + "@{} repository: {}".format( + repository_ctx.name, + result.error, + )) + +pkg_config_repository = repository_rule( + # TODO(jamiesnape): Make licenses mandatory. + # TODO(jamiesnape): Use of this rule may cause additional transitive + # dependencies to be linked and their licenses must also be enumerated. 
+ attrs = { + "licenses": attr.string_list(), + "modname": attr.string(mandatory = True), + "atleast_version": attr.string(), + "static": attr.bool(default = _DEFAULT_STATIC), + "build_file_template": attr.label( + default = _DEFAULT_TEMPLATE, + allow_files = True, + ), + "extra_srcs": attr.string_list(), + "extra_hdrs": attr.string_list(), + "extra_copts": attr.string_list(), + "extra_defines": attr.string_list(), + "extra_includes": attr.string_list(), + "extra_linkopts": attr.string_list(), + "extra_deps": attr.string_list(), + "build_epilog": attr.string(), + "pkg_config_paths": attr.string_list(), + "homebrew_subdir": attr.string(), + "extra_deprecation": attr.string(), + }, + local = True, + configure = True, + implementation = _impl, +) + +"""Creates a repository that contains a single library target, based on the +results of invoking pkg-config. + +The pkg_config_repository flavor of this rule is intended to be called directly +from the WORKSPACE file, or from a macro that was called by the WORKSPACE file. +The setup_pkg_config_repository flavor of this rule is intended to be called by +other repository_rule implementation functions. + +Example: + WORKSPACE: + load("@drake//bazel/workspace:pkg_config.bzl", "pkg_config_repository") + pkg_config_repository( + name = "foo", + modname = "foo-2.0", + ) + + BUILD: + cc_library( + name = "foobar", + deps = ["@foo"], + srcs = ["bar.cc"], + ) + +Args: + name: A unique name for this rule. + licenses: Licenses of the library. Valid license types include restricted, + reciprocal, notice, permissive, and unencumbered. See + https://docs.bazel.build/versions/master/be/functions.html#licenses_args + for more information. + modname: The library name as known to pkg-config. + atleast_version: (Optional) The --atleast-version to pkg-config. + static: (Optional) Add linkopts for static linking to the library target. + build_file_template: (Optional) (Advanced) Override the BUILD template. 
+ extra_srcs: (Optional) Extra items to add to the library target. + extra_hdrs: (Optional) Extra items to add to the library target. + extra_copts: (Optional) Extra items to add to the library target. + extra_defines: (Optional) Extra items to add to the library target. + extra_includes: (Optional) Extra items to add to the library target. + extra_linkopts: (Optional) Extra items to add to the library target. + extra_deps: (Optional) Extra items to add to the library target. + build_epilog: (Optional) Extra text to add to the generated BUILD.bazel. + pkg_config_paths: (Optional) Paths to find pkg-config files (.pc). Note + that we ignore the environment variable PKG_CONFIG_PATH + set by the user. + homebrew_subdir: (Optional) If running on macOS, then this path under the + homebrew prefix will also be searched. For example, + homebrew_subdir = "opt/libpng/lib/pkgconfig" would search + "/usr/local/opt/libpng/lib/pkgconfig" or + "/opt/homebrew/opt/libpng/lib/pkgconfig". + extra_deprecation: (Optional) Add a deprecation message to the library + BUILD target. +""" diff --git a/workspace/pycodestyle/BUILD.bazel b/workspace/pycodestyle/BUILD.bazel new file mode 100644 index 0000000..1f52546 --- /dev/null +++ b/workspace/pycodestyle/BUILD.bazel @@ -0,0 +1,3 @@ +load("@gz//bazel/lint:lint.bzl", "add_lint_tests") + +add_lint_tests() diff --git a/workspace/pycodestyle/package.BUILD.bazel b/workspace/pycodestyle/package.BUILD.bazel new file mode 100644 index 0000000..7113b96 --- /dev/null +++ b/workspace/pycodestyle/package.BUILD.bazel @@ -0,0 +1,40 @@ +# -*- python -*- + +load("@gz//bazel/skylark:py.bzl", "py_binary") +load("@gz//bazel/workspace:generate_file.bzl", "generate_file") + +licenses(["notice"]) # MIT + +package(default_visibility = ["//visibility:public"]) + +# Downstream users of python modules expect to say 'import pycodestyle'. +# However, the sandbox looks like:: +# +#