diff --git a/.azure-pipelines/azure-pipelines-linux.yml b/.azure-pipelines/azure-pipelines-linux.yml deleted file mode 100755 index 98a356c9d..000000000 --- a/.azure-pipelines/azure-pipelines-linux.yml +++ /dev/null @@ -1,85 +0,0 @@ -# This file was generated automatically from conda-smithy. To update this configuration, -# update the conda-forge.yml and/or the recipe/meta.yaml. -# -*- mode: yaml -*- - -jobs: -- job: linux - pool: - vmImage: ubuntu-latest - strategy: - matrix: - linux_64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11: - CONFIG: linux_64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11 - UPLOAD_PACKAGES: 'True' - DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cuda:11.8 - linux_64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13: - CONFIG: linux_64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13 - UPLOAD_PACKAGES: 'True' - DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 - linux_aarch64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11: - CONFIG: linux_aarch64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11 - UPLOAD_PACKAGES: 'True' - DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cuda:11.8 - linux_aarch64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13: - CONFIG: linux_aarch64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13 - UPLOAD_PACKAGES: 'True' - DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 - linux_ppc64le_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11: - CONFIG: linux_ppc64le_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11 - UPLOAD_PACKAGES: 'True' - DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cuda:11.8 - linux_ppc64le_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13: - CONFIG: linux_ppc64le_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13 - UPLOAD_PACKAGES: 'True' - DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 - timeoutInMinutes: 360 - variables: {} - - steps: - - script: | - sudo mkdir -p /opt/empty_dir || true - for d in \ - /opt/ghc \ - /opt/hostedtoolcache \ - /usr/lib/jvm \ - /usr/local/.ghcup \ - /usr/local/lib/android \ - /usr/local/share/powershell \ - /usr/share/dotnet \ - /usr/share/swift \ - ; do - sudo rsync --stats -a --delete /opt/empty_dir/ $d || true - done - sudo apt-get purge -y -f firefox \ - google-chrome-stable \ - microsoft-edge-stable - sudo apt-get autoremove -y >& /dev/null - sudo apt-get autoclean -y >& /dev/null - df -h - displayName: Manage disk space - # configure qemu binfmt-misc running. 
This allows us to run docker containers - # embedded qemu-static - - script: | - docker run --rm --privileged multiarch/qemu-user-static:register --reset --credential yes - ls /proc/sys/fs/binfmt_misc/ - condition: not(startsWith(variables['CONFIG'], 'linux_64')) - displayName: Configure binfmt_misc - - - script: | - export CI=azure - export flow_run_id=azure_$(Build.BuildNumber).$(System.JobAttempt) - export remote_url=$(Build.Repository.Uri) - export sha=$(Build.SourceVersion) - export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME - export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) - if [[ "${BUILD_REASON:-}" == "PullRequest" ]]; then - export IS_PR_BUILD="True" - else - export IS_PR_BUILD="False" - fi - .scripts/run_docker_build.sh - displayName: Run docker build - env: - BINSTAR_TOKEN: $(BINSTAR_TOKEN) - FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) - STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) \ No newline at end of file diff --git a/.azure-pipelines/azure-pipelines-osx.yml b/.azure-pipelines/azure-pipelines-osx.yml deleted file mode 100755 index ac0bf384e..000000000 --- a/.azure-pipelines/azure-pipelines-osx.yml +++ /dev/null @@ -1,40 +0,0 @@ -# This file was generated automatically from conda-smithy. To update this configuration, -# update the conda-forge.yml and/or the recipe/meta.yaml. -# -*- mode: yaml -*- - -jobs: -- job: osx - pool: - vmImage: macOS-13 - strategy: - matrix: - osx_64_: - CONFIG: osx_64_ - UPLOAD_PACKAGES: 'True' - osx_arm64_: - CONFIG: osx_arm64_ - UPLOAD_PACKAGES: 'True' - timeoutInMinutes: 360 - variables: {} - - steps: - # TODO: Fast finish on azure pipelines? - - script: | - export CI=azure - export flow_run_id=azure_$(Build.BuildNumber).$(System.JobAttempt) - export remote_url=$(Build.Repository.Uri) - export sha=$(Build.SourceVersion) - export OSX_FORCE_SDK_DOWNLOAD="1" - export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME - export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) - if [[ "${BUILD_REASON:-}" == "PullRequest" ]]; then - export IS_PR_BUILD="True" - else - export IS_PR_BUILD="False" - fi - ./.scripts/run_osx_build.sh - displayName: Run OSX build - env: - BINSTAR_TOKEN: $(BINSTAR_TOKEN) - FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) - STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) \ No newline at end of file diff --git a/.azure-pipelines/azure-pipelines-win.yml b/.azure-pipelines/azure-pipelines-win.yml index 28002c897..89c40d832 100755 --- a/.azure-pipelines/azure-pipelines-win.yml +++ b/.azure-pipelines/azure-pipelines-win.yml @@ -16,8 +16,8 @@ jobs: UPLOAD_PACKAGES: 'True' timeoutInMinutes: 360 variables: - CONDA_BLD_PATH: C:\\bld\\ - MINIFORGE_HOME: C:\\Miniforge + CONDA_BLD_PATH: C:\bld\ + MINIFORGE_HOME: C:\miniforge\ UPLOAD_TEMP: D:\\tmp steps: @@ -35,6 +35,4 @@ jobs: sha: $(Build.SourceVersion) UPLOAD_PACKAGES: $(UPLOAD_PACKAGES) UPLOAD_TEMP: $(UPLOAD_TEMP) - BINSTAR_TOKEN: $(BINSTAR_TOKEN) - FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) - STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) \ No newline at end of file + BINSTAR_TOKEN: $(BINSTAR_TOKEN) \ No newline at end of file diff --git a/.ci_support/linux_64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml b/.ci_support/linux_64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml deleted file mode 100644 index 7aa8ef0a8..000000000 --- a/.ci_support/linux_64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml +++ /dev/null @@ -1,82 +0,0 @@ -aws_crt_cpp: -- 0.29.0 -aws_sdk_cpp: -- 1.11.407 -azure_core_cpp: 
-- 1.14.0 -azure_identity_cpp: -- 1.10.0 -azure_storage_blobs_cpp: -- 12.13.0 -azure_storage_files_datalake_cpp: -- 12.12.0 -bzip2: -- '1' -c_compiler: -- gcc -c_compiler_version: -- '11' -c_stdlib: -- sysroot -c_stdlib_version: -- '2.17' -cdt_name: -- cos7 -channel_sources: -- conda-forge -channel_targets: -- conda-forge main -cuda_compiler: -- nvcc -cuda_compiler_version: -- '11.8' -cuda_compiler_version_min: -- '11.8' -cxx_compiler: -- gxx -cxx_compiler_version: -- '11' -docker_image: -- quay.io/condaforge/linux-anvil-cuda:11.8 -gflags: -- '2.2' -glog: -- '0.7' -libabseil: -- '20240722' -libboost_headers: -- '1.86' -libgoogle_cloud_devel: -- '2.30' -libgoogle_cloud_storage_devel: -- '2.30' -libgrpc: -- '1.65' -libprotobuf: -- 5.27.5 -lz4_c: -- 1.9.3 -openssl: -- '3' -orc: -- 2.0.2 -re2: -- 2024.07.02 -snappy: -- '1.2' -target_platform: -- linux-64 -thrift_cpp: -- 0.21.0 -zip_keys: -- - c_compiler_version - - cxx_compiler_version - - c_stdlib_version - - cdt_name - - cuda_compiler - - cuda_compiler_version - - docker_image -zlib: -- '1' -zstd: -- '1.5' diff --git a/.ci_support/linux_64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml b/.ci_support/linux_64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml deleted file mode 100644 index e569f16a1..000000000 --- a/.ci_support/linux_64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml +++ /dev/null @@ -1,82 +0,0 @@ -aws_crt_cpp: -- 0.29.0 -aws_sdk_cpp: -- 1.11.407 -azure_core_cpp: -- 1.14.0 -azure_identity_cpp: -- 1.10.0 -azure_storage_blobs_cpp: -- 12.13.0 -azure_storage_files_datalake_cpp: -- 12.12.0 -bzip2: -- '1' -c_compiler: -- gcc -c_compiler_version: -- '13' -c_stdlib: -- sysroot -c_stdlib_version: -- '2.17' -cdt_name: -- cos7 -channel_sources: -- conda-forge -channel_targets: -- conda-forge main -cuda_compiler: -- None -cuda_compiler_version: -- None -cuda_compiler_version_min: -- '11.8' -cxx_compiler: -- gxx -cxx_compiler_version: -- '13' -docker_image: -- quay.io/condaforge/linux-anvil-cos7-x86_64 -gflags: -- '2.2' -glog: -- '0.7' -libabseil: -- '20240722' -libboost_headers: -- '1.86' -libgoogle_cloud_devel: -- '2.30' -libgoogle_cloud_storage_devel: -- '2.30' -libgrpc: -- '1.65' -libprotobuf: -- 5.27.5 -lz4_c: -- 1.9.3 -openssl: -- '3' -orc: -- 2.0.2 -re2: -- 2024.07.02 -snappy: -- '1.2' -target_platform: -- linux-64 -thrift_cpp: -- 0.21.0 -zip_keys: -- - c_compiler_version - - cxx_compiler_version - - c_stdlib_version - - cdt_name - - cuda_compiler - - cuda_compiler_version - - docker_image -zlib: -- '1' -zstd: -- '1.5' diff --git a/.ci_support/linux_aarch64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml b/.ci_support/linux_aarch64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml deleted file mode 100644 index a1e083058..000000000 --- a/.ci_support/linux_aarch64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml +++ /dev/null @@ -1,86 +0,0 @@ -BUILD: -- aarch64-conda_cos7-linux-gnu -aws_crt_cpp: -- 0.29.0 -aws_sdk_cpp: -- 1.11.407 -azure_core_cpp: -- 1.14.0 -azure_identity_cpp: -- 1.10.0 -azure_storage_blobs_cpp: -- 12.13.0 -azure_storage_files_datalake_cpp: -- 12.12.0 -bzip2: -- '1' -c_compiler: -- gcc -c_compiler_version: -- '11' -c_stdlib: -- sysroot -c_stdlib_version: -- '2.17' -cdt_arch: -- aarch64 -cdt_name: -- cos7 -channel_sources: -- conda-forge -channel_targets: -- conda-forge 
main -cuda_compiler: -- nvcc -cuda_compiler_version: -- '11.8' -cuda_compiler_version_min: -- '11.8' -cxx_compiler: -- gxx -cxx_compiler_version: -- '11' -docker_image: -- quay.io/condaforge/linux-anvil-cuda:11.8 -gflags: -- '2.2' -glog: -- '0.7' -libabseil: -- '20240722' -libboost_headers: -- '1.86' -libgoogle_cloud_devel: -- '2.30' -libgoogle_cloud_storage_devel: -- '2.30' -libgrpc: -- '1.65' -libprotobuf: -- 5.27.5 -lz4_c: -- 1.9.3 -openssl: -- '3' -orc: -- 2.0.2 -re2: -- 2024.07.02 -snappy: -- '1.2' -target_platform: -- linux-aarch64 -thrift_cpp: -- 0.21.0 -zip_keys: -- - c_compiler_version - - cxx_compiler_version - - c_stdlib_version - - cdt_name - - cuda_compiler - - cuda_compiler_version - - docker_image -zlib: -- '1' -zstd: -- '1.5' diff --git a/.ci_support/linux_aarch64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml b/.ci_support/linux_aarch64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml deleted file mode 100644 index dbcbde020..000000000 --- a/.ci_support/linux_aarch64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml +++ /dev/null @@ -1,86 +0,0 @@ -BUILD: -- aarch64-conda_cos7-linux-gnu -aws_crt_cpp: -- 0.29.0 -aws_sdk_cpp: -- 1.11.407 -azure_core_cpp: -- 1.14.0 -azure_identity_cpp: -- 1.10.0 -azure_storage_blobs_cpp: -- 12.13.0 -azure_storage_files_datalake_cpp: -- 12.12.0 -bzip2: -- '1' -c_compiler: -- gcc -c_compiler_version: -- '13' -c_stdlib: -- sysroot -c_stdlib_version: -- '2.17' -cdt_arch: -- aarch64 -cdt_name: -- cos7 -channel_sources: -- conda-forge -channel_targets: -- conda-forge main -cuda_compiler: -- None -cuda_compiler_version: -- None -cuda_compiler_version_min: -- '11.8' -cxx_compiler: -- gxx -cxx_compiler_version: -- '13' -docker_image: -- quay.io/condaforge/linux-anvil-cos7-x86_64 -gflags: -- '2.2' -glog: -- '0.7' -libabseil: -- '20240722' -libboost_headers: -- '1.86' -libgoogle_cloud_devel: -- '2.30' -libgoogle_cloud_storage_devel: -- '2.30' -libgrpc: -- '1.65' -libprotobuf: -- 5.27.5 -lz4_c: -- 1.9.3 -openssl: -- '3' -orc: -- 2.0.2 -re2: -- 2024.07.02 -snappy: -- '1.2' -target_platform: -- linux-aarch64 -thrift_cpp: -- 0.21.0 -zip_keys: -- - c_compiler_version - - cxx_compiler_version - - c_stdlib_version - - cdt_name - - cuda_compiler - - cuda_compiler_version - - docker_image -zlib: -- '1' -zstd: -- '1.5' diff --git a/.ci_support/linux_ppc64le_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml b/.ci_support/linux_ppc64le_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml deleted file mode 100644 index 1c0e0bf63..000000000 --- a/.ci_support/linux_ppc64le_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml +++ /dev/null @@ -1,82 +0,0 @@ -aws_crt_cpp: -- 0.29.0 -aws_sdk_cpp: -- 1.11.407 -azure_core_cpp: -- 1.14.0 -azure_identity_cpp: -- 1.10.0 -azure_storage_blobs_cpp: -- 12.13.0 -azure_storage_files_datalake_cpp: -- 12.12.0 -bzip2: -- '1' -c_compiler: -- gcc -c_compiler_version: -- '11' -c_stdlib: -- sysroot -c_stdlib_version: -- '2.17' -cdt_name: -- cos7 -channel_sources: -- conda-forge -channel_targets: -- conda-forge main -cuda_compiler: -- nvcc -cuda_compiler_version: -- '11.8' -cuda_compiler_version_min: -- '11.8' -cxx_compiler: -- gxx -cxx_compiler_version: -- '11' -docker_image: -- quay.io/condaforge/linux-anvil-cuda:11.8 -gflags: -- '2.2' -glog: -- '0.7' -libabseil: -- '20240722' -libboost_headers: -- '1.86' 
-libgoogle_cloud_devel: -- '2.30' -libgoogle_cloud_storage_devel: -- '2.30' -libgrpc: -- '1.65' -libprotobuf: -- 5.27.5 -lz4_c: -- 1.9.3 -openssl: -- '3' -orc: -- 2.0.2 -re2: -- 2024.07.02 -snappy: -- '1.2' -target_platform: -- linux-ppc64le -thrift_cpp: -- 0.21.0 -zip_keys: -- - c_compiler_version - - cxx_compiler_version - - c_stdlib_version - - cdt_name - - cuda_compiler - - cuda_compiler_version - - docker_image -zlib: -- '1' -zstd: -- '1.5' diff --git a/.ci_support/linux_ppc64le_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml b/.ci_support/linux_ppc64le_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml deleted file mode 100644 index 783f5af36..000000000 --- a/.ci_support/linux_ppc64le_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml +++ /dev/null @@ -1,82 +0,0 @@ -aws_crt_cpp: -- 0.29.0 -aws_sdk_cpp: -- 1.11.407 -azure_core_cpp: -- 1.14.0 -azure_identity_cpp: -- 1.10.0 -azure_storage_blobs_cpp: -- 12.13.0 -azure_storage_files_datalake_cpp: -- 12.12.0 -bzip2: -- '1' -c_compiler: -- gcc -c_compiler_version: -- '13' -c_stdlib: -- sysroot -c_stdlib_version: -- '2.17' -cdt_name: -- cos7 -channel_sources: -- conda-forge -channel_targets: -- conda-forge main -cuda_compiler: -- None -cuda_compiler_version: -- None -cuda_compiler_version_min: -- '11.8' -cxx_compiler: -- gxx -cxx_compiler_version: -- '13' -docker_image: -- quay.io/condaforge/linux-anvil-cos7-x86_64 -gflags: -- '2.2' -glog: -- '0.7' -libabseil: -- '20240722' -libboost_headers: -- '1.86' -libgoogle_cloud_devel: -- '2.30' -libgoogle_cloud_storage_devel: -- '2.30' -libgrpc: -- '1.65' -libprotobuf: -- 5.27.5 -lz4_c: -- 1.9.3 -openssl: -- '3' -orc: -- 2.0.2 -re2: -- 2024.07.02 -snappy: -- '1.2' -target_platform: -- linux-ppc64le -thrift_cpp: -- 0.21.0 -zip_keys: -- - c_compiler_version - - cxx_compiler_version - - c_stdlib_version - - cdt_name - - cuda_compiler - - cuda_compiler_version - - docker_image -zlib: -- '1' -zstd: -- '1.5' diff --git a/.ci_support/migrations/aws_crt_cpp0290.yaml b/.ci_support/migrations/aws_crt_cpp0290.yaml deleted file mode 100644 index 89ea87094..000000000 --- a/.ci_support/migrations/aws_crt_cpp0290.yaml +++ /dev/null @@ -1,9 +0,0 @@ -__migrator: - build_number: 1 - commit_message: Rebuild for aws_crt_cpp 0.29.0 - kind: version - migration_number: 1 - automerge: true -aws_crt_cpp: -- 0.29.0 -migrator_ts: 1729790301.3397346 diff --git a/.ci_support/migrations/numpy2.yaml b/.ci_support/migrations/numpy2.yaml new file mode 100644 index 000000000..d70edeb0d --- /dev/null +++ b/.ci_support/migrations/numpy2.yaml @@ -0,0 +1,74 @@ +__migrator: + build_number: 1 + kind: version + commit_message: | + Rebuild for numpy 2.0 + + TL;DR: The way we build against numpy has changed as of numpy 2.0. This bot + PR has updated the recipe to account for the changes (see below for details). + The numpy 2.0 package itself is currently only available from a special release + channel (`conda-forge/label/numpy_rc`) and will not be available on the main + `conda-forge` channel until the release of numpy 2.0 GA. + + The biggest change is that we no longer need to use the oldest available numpy + version at build time in order to support old numpy version at runtime - numpy + will by default use a compatible ABI for the oldest still-supported numpy versions. 
+ + Additionally, we no longer need to use `{{ pin_compatible("numpy") }}` as a + run requirement - this has been handled for more than two years now by a + run-export on the numpy package itself. The migrator will therefore remove + any occurrences of this. + + However, by default, building against numpy 2.0 will assume that the package + is compatible with numpy 2.0, which is not necessarily the case. You should + check that the upstream package explicitly supports numpy 2.0, otherwise you + need to add a `- numpy <2.0dev0` run requirement until that happens (check numpy + issue 26191 for an overview of the most important packages). + + Note that the numpy release candidate promises to be ABI-compatible with the + final 2.0 release. This means that building against 2.0.0rc1 produces packages + that can be published to our main channels. + + If you already want to use the numpy 2.0 release candidate yourself, you can do + ``` + conda config --add channels conda-forge/label/numpy_rc + ``` + or add this channel to your `.condarc` file directly. + + ### To-Dos: + * [ ] Match run-requirements for numpy (i.e. check upstream `pyproject.toml` or however the project specifies numpy compatibility) + * If upstream is not yet compatible with numpy 2.0, add `numpy <2.0dev0` upper bound under `run:`. + * If upstream is already compatible with numpy 2.0, nothing else should be necessary in most cases. + * If upstream requires a minimum numpy version newer than 1.19, you can add `numpy >=x.y` under `run:`. + * [ ] Remove any remaining occurrences of `{{ pin_compatible("numpy") }}` that the bot may have missed. + + PS. If the build does not compile anymore, this is almost certainly a sign that + the upstream project is not yet ready for numpy 2.0; do not close this PR until + a version compatible with numpy 2.0 has been released upstream and on this + feedstock (in the meantime, you can keep the bot from reopening this PR in + case of git conflicts by marking it as a draft). 
+ + migration_number: 1 + exclude: + # needs local overrides that get stomped on by the migrator, which then fails + - scipy + # already done, but thinks its unsolvable + - pandas + ordering: + # prefer channels including numpy_rc (otherwise smithy doesn't + # know which of the two values should be taken on merge) + channel_sources: + - conda-forge + - conda-forge/label/numpy_rc,conda-forge + +# needs to match length of zip {python, python_impl, numpy} +# as it is in global CBC in order to override it +numpy: + - 1.22 # no py38 support for numpy 2.0 + - 2.0 + - 2.0 + - 2.0 + - 2.0 +channel_sources: + - conda-forge/label/numpy_rc,conda-forge +migrator_ts: 1713572489.295986 diff --git a/.ci_support/migrations/python313.yaml b/.ci_support/migrations/python313.yaml new file mode 100644 index 000000000..119bed8a2 --- /dev/null +++ b/.ci_support/migrations/python313.yaml @@ -0,0 +1,42 @@ +migrator_ts: 1724712607 +__migrator: + commit_message: Rebuild for python 3.13 + migration_number: 1 + operation: key_add + primary_key: python + ordering: + python: + - 3.6.* *_cpython + - 3.7.* *_cpython + - 3.8.* *_cpython + - 3.9.* *_cpython + - 3.10.* *_cpython + - 3.11.* *_cpython + - 3.12.* *_cpython + - 3.13.* *_cp313 # new entry + - 3.6.* *_73_pypy + - 3.7.* *_73_pypy + - 3.8.* *_73_pypy + - 3.9.* *_73_pypy + paused: false + longterm: true + pr_limit: 20 + max_solver_attempts: 3 # this will make the bot retry "not solvable" stuff 12 times + exclude: + # this shouldn't attempt to modify the python feedstocks + - python + - pypy3.6 + - pypy-meta + - cross-python + - python_abi + # see https://github.com/conda-forge/scipy-feedstock/pull/283 + - scipy + exclude_pinned_pkgs: false + +python: +- 3.13.* *_cp313 +# additional entries to add for zip_keys +numpy: +- 2 +python_impl: +- cpython diff --git a/.ci_support/osx_64_.yaml b/.ci_support/osx_64_.yaml deleted file mode 100644 index 391b389b1..000000000 --- a/.ci_support/osx_64_.yaml +++ /dev/null @@ -1,77 +0,0 @@ -MACOSX_DEPLOYMENT_TARGET: -- '10.13' -MACOSX_SDK_VERSION: -- '10.13' -aws_crt_cpp: -- 0.29.0 -aws_sdk_cpp: -- 1.11.407 -azure_core_cpp: -- 1.14.0 -azure_identity_cpp: -- 1.10.0 -azure_storage_blobs_cpp: -- 12.13.0 -azure_storage_files_datalake_cpp: -- 12.12.0 -bzip2: -- '1' -c_compiler: -- clang -c_compiler_version: -- '18' -c_stdlib: -- macosx_deployment_target -c_stdlib_version: -- '10.13' -channel_sources: -- conda-forge -channel_targets: -- conda-forge main -cuda_compiler: -- None -cuda_compiler_version: -- None -cxx_compiler: -- clangxx -cxx_compiler_version: -- '18' -gflags: -- '2.2' -glog: -- '0.7' -libabseil: -- '20240722' -libboost_headers: -- '1.86' -libgoogle_cloud_devel: -- '2.30' -libgoogle_cloud_storage_devel: -- '2.30' -libgrpc: -- '1.65' -libprotobuf: -- 5.27.5 -lz4_c: -- 1.9.3 -macos_machine: -- x86_64-apple-darwin13.4.0 -openssl: -- '3' -orc: -- 2.0.2 -re2: -- 2024.07.02 -snappy: -- '1.2' -target_platform: -- osx-64 -thrift_cpp: -- 0.21.0 -zip_keys: -- - c_compiler_version - - cxx_compiler_version -zlib: -- '1' -zstd: -- '1.5' diff --git a/.ci_support/osx_arm64_.yaml b/.ci_support/osx_arm64_.yaml deleted file mode 100644 index 96944ec3b..000000000 --- a/.ci_support/osx_arm64_.yaml +++ /dev/null @@ -1,77 +0,0 @@ -MACOSX_DEPLOYMENT_TARGET: -- '11.0' -MACOSX_SDK_VERSION: -- '11.0' -aws_crt_cpp: -- 0.29.0 -aws_sdk_cpp: -- 1.11.407 -azure_core_cpp: -- 1.14.0 -azure_identity_cpp: -- 1.10.0 -azure_storage_blobs_cpp: -- 12.13.0 -azure_storage_files_datalake_cpp: -- 12.12.0 -bzip2: -- '1' -c_compiler: -- clang -c_compiler_version: -- '18' 
-c_stdlib: -- macosx_deployment_target -c_stdlib_version: -- '11.0' -channel_sources: -- conda-forge -channel_targets: -- conda-forge main -cuda_compiler: -- None -cuda_compiler_version: -- None -cxx_compiler: -- clangxx -cxx_compiler_version: -- '18' -gflags: -- '2.2' -glog: -- '0.7' -libabseil: -- '20240722' -libboost_headers: -- '1.86' -libgoogle_cloud_devel: -- '2.30' -libgoogle_cloud_storage_devel: -- '2.30' -libgrpc: -- '1.65' -libprotobuf: -- 5.27.5 -lz4_c: -- 1.9.3 -macos_machine: -- arm64-apple-darwin20.0.0 -openssl: -- '3' -orc: -- 2.0.2 -re2: -- 2024.07.02 -snappy: -- '1.2' -target_platform: -- osx-arm64 -thrift_cpp: -- 0.21.0 -zip_keys: -- - c_compiler_version - - cxx_compiler_version -zlib: -- '1' -zstd: -- '1.5' diff --git a/.ci_support/win_64_cuda_compilerNonecuda_compiler_versionNone.yaml b/.ci_support/win_64_cuda_compilerNonecuda_compiler_versionNone.yaml index 708d6f86d..1222920ba 100644 --- a/.ci_support/win_64_cuda_compilerNonecuda_compiler_versionNone.yaml +++ b/.ci_support/win_64_cuda_compilerNonecuda_compiler_versionNone.yaml @@ -42,10 +42,26 @@ libprotobuf: - 5.27.5 lz4_c: - 1.9.3 +numpy: +- '2.0' +- '2.0' +- '2.0' +- '2' +- '2.0' openssl: - '3' orc: - 2.0.2 +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x +python: +- 3.10.* *_cpython +- 3.11.* *_cpython +- 3.12.* *_cpython +- 3.13.* *_cp313 +- 3.9.* *_cpython re2: - 2024.07.02 snappy: @@ -57,6 +73,8 @@ thrift_cpp: zip_keys: - - cuda_compiler - cuda_compiler_version +- - python + - numpy zlib: - '1' zstd: diff --git a/.ci_support/win_64_cuda_compilernvcccuda_compiler_version11.8.yaml b/.ci_support/win_64_cuda_compilernvcccuda_compiler_version11.8.yaml index 8d4208a41..82883bf88 100644 --- a/.ci_support/win_64_cuda_compilernvcccuda_compiler_version11.8.yaml +++ b/.ci_support/win_64_cuda_compilernvcccuda_compiler_version11.8.yaml @@ -42,10 +42,26 @@ libprotobuf: - 5.27.5 lz4_c: - 1.9.3 +numpy: +- '2.0' +- '2.0' +- '2.0' +- '2' +- '2.0' openssl: - '3' orc: - 2.0.2 +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x +python: +- 3.10.* *_cpython +- 3.11.* *_cpython +- 3.12.* *_cpython +- 3.13.* *_cp313 +- 3.9.* *_cpython re2: - 2024.07.02 snappy: @@ -57,6 +73,8 @@ thrift_cpp: zip_keys: - - cuda_compiler - cuda_compiler_version +- - python + - numpy zlib: - '1' zstd: diff --git a/.scripts/build_steps.sh b/.scripts/build_steps.sh deleted file mode 100755 index 827a29fbc..000000000 --- a/.scripts/build_steps.sh +++ /dev/null @@ -1,100 +0,0 @@ -#!/usr/bin/env bash - -# PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here -# will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent -# changes to this script, consider a proposal to conda-smithy so that other feedstocks can also -# benefit from the improvement. 
- -# -*- mode: jinja-shell -*- - -set -xeuo pipefail -export FEEDSTOCK_ROOT="${FEEDSTOCK_ROOT:-/home/conda/feedstock_root}" -source ${FEEDSTOCK_ROOT}/.scripts/logging_utils.sh - - -( endgroup "Start Docker" ) 2> /dev/null - -( startgroup "Configuring conda" ) 2> /dev/null - -export PYTHONUNBUFFERED=1 -export RECIPE_ROOT="${RECIPE_ROOT:-/home/conda/recipe_root}" -export CI_SUPPORT="${FEEDSTOCK_ROOT}/.ci_support" -export CONFIG_FILE="${CI_SUPPORT}/${CONFIG}.yaml" - -cat >~/.condarc < /opt/conda/conda-meta/history -micromamba install --root-prefix ~/.conda --prefix /opt/conda \ - --yes --override-channels --channel conda-forge --strict-channel-priority \ - pip python=3.12 conda-build conda-forge-ci-setup=4 "conda-build>=24.1" -export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1 - -# set up the condarc -setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" - -source run_conda_forge_build_setup - - - -# make the build number clobber -make_build_number "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" - -if [[ "${HOST_PLATFORM}" != "${BUILD_PLATFORM}" ]] && [[ "${HOST_PLATFORM}" != linux-* ]] && [[ "${BUILD_WITH_CONDA_DEBUG:-0}" != 1 ]]; then - EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --no-test" -fi - - -( endgroup "Configuring conda" ) 2> /dev/null - -if [[ -f "${FEEDSTOCK_ROOT}/LICENSE.txt" ]]; then - cp "${FEEDSTOCK_ROOT}/LICENSE.txt" "${RECIPE_ROOT}/recipe-scripts-license.txt" -fi - -if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then - if [[ "x${BUILD_OUTPUT_ID:-}" != "x" ]]; then - EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --output-id ${BUILD_OUTPUT_ID}" - fi - conda debug "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \ - ${EXTRA_CB_OPTIONS:-} \ - --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml" - - # Drop into an interactive shell - /bin/bash -else - conda-build "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \ - --suppress-variables ${EXTRA_CB_OPTIONS:-} \ - --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml" \ - --extra-meta flow_run_id="${flow_run_id:-}" remote_url="${remote_url:-}" sha="${sha:-}" - ( startgroup "Inspecting artifacts" ) 2> /dev/null - - # inspect_artifacts was only added in conda-forge-ci-setup 4.9.4 - command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts --recipe-dir "${RECIPE_ROOT}" -m "${CONFIG_FILE}" || echo "inspect_artifacts needs conda-forge-ci-setup >=4.9.4" - - ( endgroup "Inspecting artifacts" ) 2> /dev/null - ( startgroup "Validating outputs" ) 2> /dev/null - - validate_recipe_outputs "${FEEDSTOCK_NAME}" - - ( endgroup "Validating outputs" ) 2> /dev/null - - ( startgroup "Uploading packages" ) 2> /dev/null - - if [[ "${UPLOAD_PACKAGES}" != "False" ]] && [[ "${IS_PR_BUILD}" == "False" ]]; then - upload_package --validate --feedstock-name="${FEEDSTOCK_NAME}" "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" - fi - - ( endgroup "Uploading packages" ) 2> /dev/null -fi - -( startgroup "Final checks" ) 2> /dev/null - -touch "${FEEDSTOCK_ROOT}/build_artifacts/conda-forge-build-done-${CONFIG}" \ No newline at end of file diff --git a/.scripts/run_docker_build.sh b/.scripts/run_docker_build.sh deleted file mode 100755 index 00f377a80..000000000 --- a/.scripts/run_docker_build.sh +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/env bash - -# PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here -# will be lost next time ``conda smithy rerender`` is run. 
If you would like to make permanent -# changes to this script, consider a proposal to conda-smithy so that other feedstocks can also -# benefit from the improvement. - -source .scripts/logging_utils.sh - -( startgroup "Configure Docker" ) 2> /dev/null - -set -xeo pipefail - -THISDIR="$( cd "$( dirname "$0" )" >/dev/null && pwd )" -PROVIDER_DIR="$(basename $THISDIR)" - -FEEDSTOCK_ROOT="$( cd "$( dirname "$0" )/.." >/dev/null && pwd )" -RECIPE_ROOT="${FEEDSTOCK_ROOT}/recipe" - -if [ -z ${FEEDSTOCK_NAME} ]; then - export FEEDSTOCK_NAME=$(basename ${FEEDSTOCK_ROOT}) -fi - -if [[ "${sha:-}" == "" ]]; then - pushd "${FEEDSTOCK_ROOT}" - sha=$(git rev-parse HEAD) - popd -fi - -docker info - -# In order for the conda-build process in the container to write to the mounted -# volumes, we need to run with the same id as the host machine, which is -# normally the owner of the mounted volumes, or at least has write permission -export HOST_USER_ID=$(id -u) -# Check if docker-machine is being used (normally on OSX) and get the uid from -# the VM -if hash docker-machine 2> /dev/null && docker-machine active > /dev/null; then - export HOST_USER_ID=$(docker-machine ssh $(docker-machine active) id -u) -fi - -ARTIFACTS="$FEEDSTOCK_ROOT/build_artifacts" - -if [ -z "$CONFIG" ]; then - set +x - FILES=`ls .ci_support/linux_*` - CONFIGS="" - for file in $FILES; do - CONFIGS="${CONFIGS}'${file:12:-5}' or "; - done - echo "Need to set CONFIG env variable. Value can be one of ${CONFIGS:0:-4}" - exit 1 -fi - -if [ -z "${DOCKER_IMAGE}" ]; then - SHYAML_INSTALLED="$(shyaml -h || echo NO)" - if [ "${SHYAML_INSTALLED}" == "NO" ]; then - echo "WARNING: DOCKER_IMAGE variable not set and shyaml not installed. Trying to parse with coreutils" - DOCKER_IMAGE=$(cat .ci_support/${CONFIG}.yaml | grep '^docker_image:$' -A 1 | tail -n 1 | cut -b 3-) - if [ "${DOCKER_IMAGE}" = "" ]; then - echo "No docker_image entry found in ${CONFIG}. Falling back to quay.io/condaforge/linux-anvil-comp7" - DOCKER_IMAGE="quay.io/condaforge/linux-anvil-comp7" - fi - else - DOCKER_IMAGE="$(cat "${FEEDSTOCK_ROOT}/.ci_support/${CONFIG}.yaml" | shyaml get-value docker_image.0 quay.io/condaforge/linux-anvil-comp7 )" - fi -fi - -mkdir -p "$ARTIFACTS" -DONE_CANARY="$ARTIFACTS/conda-forge-build-done-${CONFIG}" -rm -f "$DONE_CANARY" - -# Allow people to specify extra default arguments to `docker run` (e.g. 
`--rm`) -DOCKER_RUN_ARGS="${CONDA_FORGE_DOCKER_RUN_ARGS}" -if [ -z "${CI}" ]; then - DOCKER_RUN_ARGS="-it ${DOCKER_RUN_ARGS}" -fi - -( endgroup "Configure Docker" ) 2> /dev/null - -( startgroup "Start Docker" ) 2> /dev/null - -export UPLOAD_PACKAGES="${UPLOAD_PACKAGES:-True}" -export IS_PR_BUILD="${IS_PR_BUILD:-False}" -docker pull "${DOCKER_IMAGE}" -docker run ${DOCKER_RUN_ARGS} \ - -v "${RECIPE_ROOT}":/home/conda/recipe_root:rw,z,delegated \ - -v "${FEEDSTOCK_ROOT}":/home/conda/feedstock_root:rw,z,delegated \ - -e CONFIG \ - -e HOST_USER_ID \ - -e UPLOAD_PACKAGES \ - -e IS_PR_BUILD \ - -e GIT_BRANCH \ - -e UPLOAD_ON_BRANCH \ - -e CI \ - -e FEEDSTOCK_NAME \ - -e CPU_COUNT \ - -e BUILD_WITH_CONDA_DEBUG \ - -e BUILD_OUTPUT_ID \ - -e flow_run_id \ - -e remote_url \ - -e sha \ - -e BINSTAR_TOKEN \ - -e FEEDSTOCK_TOKEN \ - -e STAGING_BINSTAR_TOKEN \ - "${DOCKER_IMAGE}" \ - bash \ - "/home/conda/feedstock_root/${PROVIDER_DIR}/build_steps.sh" - -# verify that the end of the script was reached -test -f "$DONE_CANARY" - -# This closes the last group opened in `build_steps.sh` -( endgroup "Final checks" ) 2> /dev/null \ No newline at end of file diff --git a/.scripts/run_osx_build.sh b/.scripts/run_osx_build.sh deleted file mode 100755 index 7f948c41f..000000000 --- a/.scripts/run_osx_build.sh +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/env bash - -# -*- mode: jinja-shell -*- - -source .scripts/logging_utils.sh - -set -xe - -MINIFORGE_HOME=${MINIFORGE_HOME:-${HOME}/miniforge3} -MINIFORGE_HOME=${MINIFORGE_HOME%/} # remove trailing slash - -( startgroup "Provisioning base env with micromamba" ) 2> /dev/null -MICROMAMBA_VERSION="1.5.10-0" -if [[ "$(uname -m)" == "arm64" ]]; then - osx_arch="osx-arm64" -else - osx_arch="osx-64" -fi -MICROMAMBA_URL="https://github.com/mamba-org/micromamba-releases/releases/download/${MICROMAMBA_VERSION}/micromamba-${osx_arch}" -MAMBA_ROOT_PREFIX="${MINIFORGE_HOME}-micromamba-$(date +%s)" -echo "Downloading micromamba ${MICROMAMBA_VERSION}" -micromamba_exe="$(mktemp -d)/micromamba" -curl -L -o "${micromamba_exe}" "${MICROMAMBA_URL}" -chmod +x "${micromamba_exe}" -echo "Creating environment" -"${micromamba_exe}" create --yes --root-prefix "${MAMBA_ROOT_PREFIX}" --prefix "${MINIFORGE_HOME}" \ - --channel conda-forge \ - pip python=3.12 conda-build conda-forge-ci-setup=4 "conda-build>=24.1" -echo "Moving pkgs cache from ${MAMBA_ROOT_PREFIX} to ${MINIFORGE_HOME}" -mv "${MAMBA_ROOT_PREFIX}/pkgs" "${MINIFORGE_HOME}" -echo "Cleaning up micromamba" -rm -rf "${MAMBA_ROOT_PREFIX}" "${micromamba_exe}" || true -( endgroup "Provisioning base env with micromamba" ) 2> /dev/null - -( startgroup "Configuring conda" ) 2> /dev/null - -source "${MINIFORGE_HOME}/etc/profile.d/conda.sh" -conda activate base -export CONDA_SOLVER="libmamba" -export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1 - - - - - -echo -e "\n\nSetting up the condarc and mangling the compiler." -setup_conda_rc ./ ./recipe ./.ci_support/${CONFIG}.yaml - -if [[ "${CI:-}" != "" ]]; then - mangle_compiler ./ ./recipe .ci_support/${CONFIG}.yaml -fi - -if [[ "${CI:-}" != "" ]]; then - echo -e "\n\nMangling homebrew in the CI to avoid conflicts." - /usr/bin/sudo mangle_homebrew - /usr/bin/sudo -k -else - echo -e "\n\nNot mangling homebrew as we are not running in CI" -fi - -if [[ "${sha:-}" == "" ]]; then - sha=$(git rev-parse HEAD) -fi - -echo -e "\n\nRunning the build setup script." 
-source run_conda_forge_build_setup - - - -( endgroup "Configuring conda" ) 2> /dev/null - -echo -e "\n\nMaking the build clobber file" -make_build_number ./ ./recipe ./.ci_support/${CONFIG}.yaml - -if [[ -f LICENSE.txt ]]; then - cp LICENSE.txt "recipe/recipe-scripts-license.txt" -fi - -if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then - if [[ "x${BUILD_OUTPUT_ID:-}" != "x" ]]; then - EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --output-id ${BUILD_OUTPUT_ID}" - fi - conda debug ./recipe -m ./.ci_support/${CONFIG}.yaml \ - ${EXTRA_CB_OPTIONS:-} \ - --clobber-file ./.ci_support/clobber_${CONFIG}.yaml - - # Drop into an interactive shell - /bin/bash -else - - if [[ "${HOST_PLATFORM}" != "${BUILD_PLATFORM}" ]]; then - EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --no-test" - fi - - conda-build ./recipe -m ./.ci_support/${CONFIG}.yaml \ - --suppress-variables ${EXTRA_CB_OPTIONS:-} \ - --clobber-file ./.ci_support/clobber_${CONFIG}.yaml \ - --extra-meta flow_run_id="$flow_run_id" remote_url="$remote_url" sha="$sha" - - ( startgroup "Inspecting artifacts" ) 2> /dev/null - - # inspect_artifacts was only added in conda-forge-ci-setup 4.9.4 - command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts --recipe-dir ./recipe -m ./.ci_support/${CONFIG}.yaml || echo "inspect_artifacts needs conda-forge-ci-setup >=4.9.4" - - ( endgroup "Inspecting artifacts" ) 2> /dev/null - ( startgroup "Validating outputs" ) 2> /dev/null - - validate_recipe_outputs "${FEEDSTOCK_NAME}" - - ( endgroup "Validating outputs" ) 2> /dev/null - - ( startgroup "Uploading packages" ) 2> /dev/null - - if [[ "${UPLOAD_PACKAGES}" != "False" ]] && [[ "${IS_PR_BUILD}" == "False" ]]; then - upload_package --validate --feedstock-name="${FEEDSTOCK_NAME}" ./ ./recipe ./.ci_support/${CONFIG}.yaml - fi - - ( endgroup "Uploading packages" ) 2> /dev/null -fi \ No newline at end of file diff --git a/.scripts/run_win_build.bat b/.scripts/run_win_build.bat index d3c0345e5..ef14a69f6 100755 --- a/.scripts/run_win_build.bat +++ b/.scripts/run_win_build.bat @@ -32,9 +32,6 @@ call "%MICROMAMBA_EXE%" create --yes --root-prefix "%MAMBA_ROOT_PREFIX%" --prefi --channel conda-forge ^ pip python=3.12 conda-build conda-forge-ci-setup=4 "conda-build>=24.1" if !errorlevel! neq 0 exit /b !errorlevel! -echo Moving pkgs cache from %MAMBA_ROOT_PREFIX% to %MINIFORGE_HOME% -move /Y "%MAMBA_ROOT_PREFIX%\pkgs" "%MINIFORGE_HOME%" -if !errorlevel! neq 0 exit /b !errorlevel! echo Removing %MAMBA_ROOT_PREFIX% del /S /Q "%MAMBA_ROOT_PREFIX%" del /S /Q "%MICROMAMBA_TMPDIR%" @@ -108,17 +105,13 @@ if /i "%CI%" == "azure" ( ) :: Validate -call :start_group "Validating outputs" -validate_recipe_outputs "%FEEDSTOCK_NAME%" -if !errorlevel! neq 0 exit /b !errorlevel! -call :end_group if /i "%UPLOAD_PACKAGES%" == "true" ( if /i "%IS_PR_BUILD%" == "false" ( call :start_group "Uploading packages" if not exist "%TEMP%\" md "%TEMP%" set "TMP=%TEMP%" - upload_package --validate --feedstock-name="%FEEDSTOCK_NAME%" .\ ".\recipe" .ci_support\%CONFIG%.yaml + upload_package .\ ".\recipe" .ci_support\%CONFIG%.yaml if !errorlevel! neq 0 exit /b !errorlevel! 
call :end_group ) diff --git a/README.md b/README.md index f04a71aed..916e46bed 100644 --- a/README.md +++ b/README.md @@ -94,6 +94,42 @@ Package license: Apache-2.0 Summary: C++ libraries for Apache Arrow Substrait +About pyarrow-core +------------------ + +Home: http://github.com/apache/arrow + +Package license: Apache-2.0 + +Summary: Python libraries for Apache Arrow Core + +About pyarrow +------------- + +Home: http://github.com/apache/arrow + +Package license: Apache-2.0 + +Summary: Python libraries for Apache Arrow with default capabilities + +About pyarrow-all +----------------- + +Home: http://github.com/apache/arrow + +Package license: Apache-2.0 + +Summary: Python libraries for Apache Arrow with all capabilities + +About pyarrow-tests +------------------- + +Home: http://github.com/apache/arrow + +Package license: Apache-2.0 + +Summary: Python test files for Apache Arrow + Current build status ==================== @@ -112,62 +148,6 @@ Current build status - - - - - - - - - - - - - - - - - - - - - - - -
VariantStatus
linux_64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11 - - variant - -
linux_64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13 - - variant - -
linux_aarch64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11 - - variant - -
linux_aarch64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13 - - variant - -
linux_ppc64le_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11 - - variant - -
linux_ppc64le_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13 - - variant - -
osx_64 - - variant - -
osx_arm64 - - variant - -
win_64_cuda_compilerNonecuda_compiler_versionNone @@ -204,6 +184,10 @@ Current release info | [![Conda Recipe](https://img.shields.io/badge/recipe-libarrow--gandiva-green.svg)](https://anaconda.org/conda-forge/libarrow-gandiva) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/libarrow-gandiva.svg)](https://anaconda.org/conda-forge/libarrow-gandiva) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/libarrow-gandiva.svg)](https://anaconda.org/conda-forge/libarrow-gandiva) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/libarrow-gandiva.svg)](https://anaconda.org/conda-forge/libarrow-gandiva) | | [![Conda Recipe](https://img.shields.io/badge/recipe-libarrow--substrait-green.svg)](https://anaconda.org/conda-forge/libarrow-substrait) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/libarrow-substrait.svg)](https://anaconda.org/conda-forge/libarrow-substrait) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/libarrow-substrait.svg)](https://anaconda.org/conda-forge/libarrow-substrait) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/libarrow-substrait.svg)](https://anaconda.org/conda-forge/libarrow-substrait) | | [![Conda Recipe](https://img.shields.io/badge/recipe-libparquet-green.svg)](https://anaconda.org/conda-forge/libparquet) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/libparquet.svg)](https://anaconda.org/conda-forge/libparquet) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/libparquet.svg)](https://anaconda.org/conda-forge/libparquet) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/libparquet.svg)](https://anaconda.org/conda-forge/libparquet) | +| [![Conda Recipe](https://img.shields.io/badge/recipe-pyarrow-green.svg)](https://anaconda.org/conda-forge/pyarrow) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/pyarrow.svg)](https://anaconda.org/conda-forge/pyarrow) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/pyarrow.svg)](https://anaconda.org/conda-forge/pyarrow) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/pyarrow.svg)](https://anaconda.org/conda-forge/pyarrow) | +| [![Conda Recipe](https://img.shields.io/badge/recipe-pyarrow--all-green.svg)](https://anaconda.org/conda-forge/pyarrow-all) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/pyarrow-all.svg)](https://anaconda.org/conda-forge/pyarrow-all) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/pyarrow-all.svg)](https://anaconda.org/conda-forge/pyarrow-all) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/pyarrow-all.svg)](https://anaconda.org/conda-forge/pyarrow-all) | +| [![Conda Recipe](https://img.shields.io/badge/recipe-pyarrow--core-green.svg)](https://anaconda.org/conda-forge/pyarrow-core) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/pyarrow-core.svg)](https://anaconda.org/conda-forge/pyarrow-core) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/pyarrow-core.svg)](https://anaconda.org/conda-forge/pyarrow-core) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/pyarrow-core.svg)](https://anaconda.org/conda-forge/pyarrow-core) | +| [![Conda Recipe](https://img.shields.io/badge/recipe-pyarrow--tests-green.svg)](https://anaconda.org/conda-forge/pyarrow-tests) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/pyarrow-tests.svg)](https://anaconda.org/conda-forge/pyarrow-tests) | [![Conda 
Version](https://img.shields.io/conda/vn/conda-forge/pyarrow-tests.svg)](https://anaconda.org/conda-forge/pyarrow-tests) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/pyarrow-tests.svg)](https://anaconda.org/conda-forge/pyarrow-tests) | Installing arrow-cpp ==================== @@ -215,16 +199,16 @@ conda config --add channels conda-forge conda config --set channel_priority strict ``` -Once the `conda-forge` channel has been enabled, `apache-arrow-proc, libarrow, libarrow-acero, libarrow-all, libarrow-dataset, libarrow-flight, libarrow-flight-sql, libarrow-gandiva, libarrow-substrait, libparquet` can be installed with `conda`: +Once the `conda-forge` channel has been enabled, `apache-arrow-proc, libarrow, libarrow-acero, libarrow-all, libarrow-dataset, libarrow-flight, libarrow-flight-sql, libarrow-gandiva, libarrow-substrait, libparquet, pyarrow, pyarrow-all, pyarrow-core, pyarrow-tests` can be installed with `conda`: ``` -conda install apache-arrow-proc libarrow libarrow-acero libarrow-all libarrow-dataset libarrow-flight libarrow-flight-sql libarrow-gandiva libarrow-substrait libparquet +conda install apache-arrow-proc libarrow libarrow-acero libarrow-all libarrow-dataset libarrow-flight libarrow-flight-sql libarrow-gandiva libarrow-substrait libparquet pyarrow pyarrow-all pyarrow-core pyarrow-tests ``` or with `mamba`: ``` -mamba install apache-arrow-proc libarrow libarrow-acero libarrow-all libarrow-dataset libarrow-flight libarrow-flight-sql libarrow-gandiva libarrow-substrait libparquet +mamba install apache-arrow-proc libarrow libarrow-acero libarrow-all libarrow-dataset libarrow-flight libarrow-flight-sql libarrow-gandiva libarrow-substrait libparquet pyarrow pyarrow-all pyarrow-core pyarrow-tests ``` It is possible to list all of the versions of `apache-arrow-proc` available on your platform with `conda`: diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 32da3d0c0..d54fe696a 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -28,6 +28,4 @@ stages: condition: and(succeeded(), eq(dependencies.Check.outputs['Skip.result.start_main'], 'true')) dependsOn: Check jobs: - - template: ./.azure-pipelines/azure-pipelines-linux.yml - - template: ./.azure-pipelines/azure-pipelines-osx.yml - template: ./.azure-pipelines/azure-pipelines-win.yml \ No newline at end of file diff --git a/conda-forge.yml b/conda-forge.yml index de4bf9480..401e8f66b 100644 --- a/conda-forge.yml +++ b/conda-forge.yml @@ -1,10 +1,11 @@ azure: free_disk_space: true - max_parallel: 20 + settings_linux: + swapfile_size: 8GiB settings_win: variables: - CONDA_BLD_PATH: C:\\bld\\ - MINIFORGE_HOME: C:\\Miniforge + CONDA_BLD_PATH: C:\bld\ + MINIFORGE_HOME: C:\miniforge\ bot: abi_migration_branches: - 17.x @@ -17,7 +18,9 @@ build_platform: osx_arm64: osx_64 conda_build: pkg_format: '2' -conda_forge_output_validation: true +# debug only! 
+conda_forge_output_validation: false +conda_install_tool: micromamba github: branch_name: main tooling_branch_name: main diff --git a/recipe/build-pyarrow.bat b/recipe/build-pyarrow.bat new file mode 100644 index 000000000..6218eebb9 --- /dev/null +++ b/recipe/build-pyarrow.bat @@ -0,0 +1,40 @@ +@echo on + +pushd "%SRC_DIR%"\python + +SET ARROW_HOME=%LIBRARY_PREFIX% +SET SETUPTOOLS_SCM_PRETEND_VERSION=%PKG_VERSION% +SET PYARROW_BUILD_TYPE=release +SET PYARROW_WITH_ACERO=1 +SET PYARROW_WITH_DATASET=1 +SET PYARROW_WITH_FLIGHT=1 +SET PYARROW_WITH_GANDIVA=1 +SET PYARROW_WITH_GCS=1 +SET PYARROW_WITH_HDFS=1 +SET PYARROW_WITH_ORC=1 +SET PYARROW_WITH_PARQUET=1 +SET PYARROW_WITH_PARQUET_ENCRYPTION=1 +SET PYARROW_WITH_S3=1 +SET PYARROW_WITH_SUBSTRAIT=1 +SET PYARROW_CMAKE_GENERATOR=Ninja + +:: Enable CUDA support +if "%cuda_compiler_version%"=="None" ( + set "PYARROW_WITH_CUDA=0" +) else ( + set "PYARROW_WITH_CUDA=1" +) + +%PYTHON% setup.py ^ + build_ext ^ + install --single-version-externally-managed ^ + --record=record.txt +if %ERRORLEVEL% neq 0 exit 1 +popd + +if [%PKG_NAME%] NEQ [pyarrow-tests] ( + rd /s /q %SP_DIR%\pyarrow\tests +) + +:: generated by setup.py +rmdir .\python\build /s /q diff --git a/recipe/build-pyarrow.sh b/recipe/build-pyarrow.sh new file mode 100644 index 000000000..8a449b63c --- /dev/null +++ b/recipe/build-pyarrow.sh @@ -0,0 +1,64 @@ +#!/bin/sh +set -ex + +# Build dependencies +export ARROW_HOME=$PREFIX +export PARQUET_HOME=$PREFIX +export SETUPTOOLS_SCM_PRETEND_VERSION=$PKG_VERSION +export PYARROW_BUILD_TYPE=release +export PYARROW_WITH_ACERO=1 +export PYARROW_WITH_AZURE=1 +export PYARROW_WITH_DATASET=1 +export PYARROW_WITH_FLIGHT=1 +export PYARROW_WITH_GANDIVA=1 +export PYARROW_WITH_GCS=1 +export PYARROW_WITH_HDFS=1 +export PYARROW_WITH_ORC=1 +export PYARROW_WITH_PARQUET=1 +export PYARROW_WITH_PARQUET_ENCRYPTION=1 +export PYARROW_WITH_S3=1 +export PYARROW_WITH_SUBSTRAIT=1 +export PYARROW_CMAKE_GENERATOR=Ninja +export PYARROW_CMAKE_OPTIONS="-DARROW_SIMD_LEVEL=NONE" +BUILD_EXT_FLAGS="" + +# Enable CUDA support +if [[ ! -z "${cuda_compiler_version+x}" && "${cuda_compiler_version}" != "None" ]]; then + export PYARROW_WITH_CUDA=1 + if [[ "${build_platform}" != "${target_platform}" ]]; then + export CUDAToolkit_ROOT=${CUDA_HOME} + export CMAKE_LIBRARY_PATH=${CONDA_BUILD_SYSROOT}/lib + fi +else + export PYARROW_WITH_CUDA=0 +fi + +# Resolve: Make Error at cmake_modules/SetupCxxFlags.cmake:338 (message): Unsupported arch flag: -march=. +if [[ "${target_platform}" == "linux-aarch64" ]]; then + export PYARROW_CMAKE_OPTIONS="-DARROW_ARMV8_ARCH=armv8-a ${PYARROW_CMAKE_OPTIONS}" +fi + +if [[ "${target_platform}" == osx-* ]]; then + # See https://conda-forge.org/docs/maintainer/knowledge_base.html#newer-c-features-with-old-sdk + CXXFLAGS="${CXXFLAGS} -D_LIBCPP_DISABLE_AVAILABILITY" +fi + +if [[ "${target_platform}" == "linux-aarch64" ]] || [[ "${target_platform}" == "linux-ppc64le" ]]; then + # Limit number of threads used to avoid hardware oversubscription + export CMAKE_BUILD_PARALLEL_LEVEL=4 +fi + +cd python + +$PYTHON setup.py \ + build_ext \ + install --single-version-externally-managed \ + --record=record.txt + +if [[ "$PKG_NAME" != "pyarrow-tests" ]]; then + rm -r ${SP_DIR}/pyarrow/tests +fi + +# generated by setup.py +rm -rf build +cd .. 
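The new `recipe/build-pyarrow.sh` above compiles pyarrow once with every optional component switched on via the `PYARROW_WITH_*` flags; the recipe then splits the result into `pyarrow-core`/`pyarrow`/`pyarrow-all`. As a quick sanity check of such a build, one can try importing the submodule behind each enabled switch — a minimal sketch only; the module list is an assumption mirroring the flags above and is not part of the recipe's own test sections:

```python
# Smoke-check sketch: each import corresponds to one PYARROW_WITH_*
# switch enabled in build-pyarrow.sh. Run it in an environment where
# the full build (pyarrow-all) is installed; in pyarrow-core, the
# dataset/flight imports would be expected to fail.
import importlib

MODULES = [
    "pyarrow",           # core
    "pyarrow.parquet",   # PYARROW_WITH_PARQUET
    "pyarrow.dataset",   # PYARROW_WITH_DATASET
    "pyarrow.orc",       # PYARROW_WITH_ORC
    "pyarrow.fs",        # filesystem layer (GCS/S3/HDFS hooks)
    "pyarrow.flight",    # PYARROW_WITH_FLIGHT
]

for name in MODULES:
    importlib.import_module(name)
    print(f"OK: {name}")
```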
diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 29d115aa9..16f73d06f 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -13,7 +13,7 @@ package: version: {{ version }} source: - - url: https://www.apache.org/dyn/closer.lua/arrow/arrow-{{ version }}/apache-arrow-{{ version }}.tar.gz?action=download + - url: https://www.apache.org/dyn/closer.lua/arrow/arrow-{{ version }}/apache-arrow-{{ version }}.tar.gz?action=download fn: apache-arrow-{{ version }}.tar.gz sha256: abcf1934cd0cdddd33664e9f2d9a251d6c55239d1122ad0ed223b13a583c82a9 patches: @@ -21,16 +21,20 @@ source: - patches/0001-GH-44455-C-Update-vendored-date-to-3.0.3.patch # backport https://github.com/apache/arrow/pull/44507 - patches/0002-GH-44448-C-Add-support-for-overriding-grpc_cpp_plugi.patch - # testing-submodule not part of release tarball + # testing-submodules not part of release tarball - git_url: https://github.com/apache/arrow-testing.git git_rev: 4d209492d514c2d3cb2d392681b9aa00e6d8da1c folder: testing + - git_url: https://github.com/apache/parquet-testing.git + git_rev: cb7a9674142c137367bf75a01b79c6e214a73199 + folder: cpp/submodules/parquet-testing build: number: 0 # for cuda support, building with one version is enough to be compatible with # all later versions, since arrow is only using libcuda, and not libcudart. skip: true # [cuda_compiler_version not in ("None", cuda_compiler_version_min)] + skip: true # [unix] requirements: build: @@ -631,6 +635,288 @@ outputs: - LICENSE.txt summary: C++ libraries for Apache Parquet + - name: pyarrow-core + script: build-pyarrow.sh # [unix] + script: build-pyarrow.bat # [win] + version: {{ version }} + build: + string: py{{ CONDA_PY }}h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }} + ignore_run_exports_from: + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + rpaths: + - lib/ + - {{ SP_DIR }}/pyarrow + missing_dso_whitelist: + # not actually missing, but installed into SP_DIR, see tests + - '*/arrow_python.dll' # [win] + - '*/arrow_python_flight.dll' # [win] + # pyarrow-core builds with the capabilities but we do not ship them + # to provide the smaller core functionality. + - 'lib/libarrow_acero.*' # [unix] + - 'lib/libarrow_dataset.*' # [unix] + - 'lib/libarrow_substrait.*' # [unix] + - 'lib/libarrow_flight.*' # [unix] + - 'lib/libparquet.*' # [unix] + - 'lib/libgandiva.*' # [unix] + - 'Library/lib/arrow_acero.dll' # [win] + - 'Library/lib/arrow_dataset.dll' # [win] + - 'Library/lib/arrow_substrait.dll' # [win] + - 'Library/lib/arrow_flight.dll' # [win] + - 'Library/lib/parquet.dll' # [win] + - 'Library/lib/gandiva.dll' # [win] + requirements: + build: + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + # pyarrow does not require nvcc but it needs to link against libraries in libarrow=*=*cuda + - {{ compiler("cuda") }} # [cuda_compiler_version != "None"] + - python # [build_platform != target_platform] + - cross-python_{{ target_platform }} # [build_platform != target_platform] + - cython # [build_platform != target_platform] + - numpy # [build_platform != target_platform] + - cmake + - ninja + host: + # We add all libarrow package dependencies on host in order + # to build pyarrow once with all capabilities. 
+ - {{ pin_subpackage("libarrow-all", exact=True) }} + - clangdev {{ llvm_version }} + - llvmdev {{ llvm_version }} + - zlib + - cython + - numpy + - python + - setuptools + - setuptools-scm + run: + # We ignore the run-exports from libarrow-all and restrict to only + # libarrow, as we don't want the other libraries to be installed when + # running for pyarrow-core, where the aim is a low storage footprint. + - {{ pin_subpackage("libarrow", exact=True) }} + - python + # this is redundant with libarrow, but we want smithy to pick up that + # cuda_compiler_version_min is present, to populate the CI configs + - __cuda >={{ cuda_compiler_version_min }} # [cuda_compiler_version != "None"] + run_constrained: + - apache-arrow-proc =*={{ build_ext }} + + test: + imports: + - pyarrow + # Compute can be imported but the underlying libarrow_acero is not present. + - pyarrow.compute + - pyarrow.orc + - pyarrow.fs + - pyarrow._s3fs + - pyarrow._hdfs + # We can only test importing cuda package but cannot run when a + # CUDA device is not available, for instance, when building from CI. + # On Windows, we cannot even do that due to `nvcuda.dll` not being found, see + # https://conda-forge.org/docs/maintainer/knowledge_base.html#nvcuda-dll-cannot-be-found-on-windows + # However, we check below for (at least) the presence of a correctly-compiled module + - pyarrow.cuda # [cuda_compiler_version != "None" and not win] + commands: + # libraries that depend on python (and hence aren't in libarrow itself) + - test -f ${SP_DIR}/pyarrow/libarrow_python.so # [linux] + - test -f ${SP_DIR}/pyarrow/libarrow_python_flight.so # [linux] + - test -f ${SP_DIR}/pyarrow/libarrow_python_parquet_encryption.so # [linux] + - test -f ${SP_DIR}/pyarrow/libarrow_python.dylib # [osx] + - test -f ${SP_DIR}/pyarrow/libarrow_python_flight.dylib # [osx] + - test -f ${SP_DIR}/pyarrow/libarrow_python_parquet_encryption.dylib # [osx] + - if not exist %SP_DIR%\pyarrow\arrow_python.dll exit 1 # [win] + - if not exist %SP_DIR%\pyarrow\arrow_python_flight.dll exit 1 # [win] + - if not exist %SP_DIR%\pyarrow\arrow_python_parquet_encryption.dll exit 1 # [win] + + - test -f ${SP_DIR}/pyarrow/include/arrow/python/pyarrow.h # [unix] + - if not exist %SP_DIR%\pyarrow\include\arrow\python\pyarrow.h exit 1 # [win] + + - test ! -f ${SP_DIR}/pyarrow/tests/test_array.py # [unix] + - if exist %SP_DIR%/pyarrow/tests/test_array.py exit 1 # [win] + # Need to remove dot from PY_VER; %MYVAR:x=y% replaces "x" in %MYVAR% with "y" + - if not exist %SP_DIR%/pyarrow/_cuda.cp%PY_VER:.=%-win_amd64.pyd exit 1 # [win and cuda_compiler_version != "None"] + + # Expected not included libraries + - test ! -f $PREFIX/lib/libarrow_acero${SHLIB_EXT} # [unix] + - test ! -f $PREFIX/lib/libarrow_dataset${SHLIB_EXT} # [unix] + - test ! -f $PREFIX/lib/libarrow_flight${SHLIB_EXT} # [unix] + - test ! -f $PREFIX/lib/libgandiva${SHLIB_EXT} # [unix] + - test ! 
-f $PREFIX/lib/libparquet${SHLIB_EXT} # [unix] + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: Python libraries for Apache Arrow Core + + - name: pyarrow + version: {{ version }} + requirements: + host: + # only necessary for run-exports + - python + - numpy + run: + # Default doesn't contain flight, flight-sql and gandiva + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + - {{ pin_subpackage("libarrow-substrait", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + # do not use pin_compatible because pyarrow-core has CUDA/non-CUDA variants + - pyarrow-core {{ version }}=*_{{ PKG_BUILDNUM }}_* + - python + + test: + files: + - test_read_parquet.py + imports: + # default pyarrow contains parquet + - pyarrow.dataset + - pyarrow.parquet + commands: + # Expected not included libraries + - test ! -f $PREFIX/lib/libarrow_flight${SHLIB_EXT} # [unix] + - test ! -f $PREFIX/lib/libgandiva${SHLIB_EXT} # [unix] + + - python test_read_parquet.py + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: Python libraries for Apache Arrow with default capabilities + + - name: pyarrow-all + version: {{ version }} + requirements: + host: + # only necessary for run-exports + - python + - numpy + run: + - {{ pin_subpackage("libarrow-flight", exact=True) }} + - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} + - {{ pin_subpackage("libarrow-gandiva", exact=True) }} + - pyarrow {{ version }}=*_{{ PKG_BUILDNUM }} + - python + + test: + imports: + - pyarrow.flight + - pyarrow.gandiva + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: Python libraries for Apache Arrow with all capabilities + + - name: pyarrow-tests + script: build-pyarrow.sh # [unix] + script: build-pyarrow.bat # [win] + version: {{ version }} + build: + skip: true # [cuda_compiler_version != "None"] + requirements: + build: + - {{ compiler("c") }} + - {{ stdlib("c") }} + - {{ compiler("cxx") }} + - python # [build_platform != target_platform] + - cross-python_{{ target_platform }} # [build_platform != target_platform] + - cython # [build_platform != target_platform] + - numpy # [build_platform != target_platform] + - cmake + - ninja + host: + - {{ pin_subpackage("libarrow-all", exact=True) }} + - pyarrow-all {{ version }}=*_{{ PKG_BUILDNUM }} + - clangdev {{ llvm_version }} + - llvmdev {{ llvm_version }} + - zlib + - cython + - numpy + - python + - setuptools + - setuptools-scm + run: + - pyarrow-all {{ version }}=*_{{ PKG_BUILDNUM }} + - python + + {% if not (aarch64 or ppc64le) or py == 311 %} + test: + requires: + # test_cpp_extension_in_python requires a compiler + - {{ compiler("cxx") }} # [linux] + - pytest + - boto3 + - cffi + - cloudpickle + - cython + - fastparquet # [py<313] + - fsspec + - hypothesis + - minio-server + - pandas + - s3fs >=2023 + - scipy + - sparse # [py<313] + # these are generally (far) behind on migrating abseil/grpc/protobuf, + # and using them as test dependencies blocks the migrator unnecessarily + # - pytorch + # - tensorflow + # we're not building java bindings + # - jpype1 + # doesn't get picked up correctly + # - libhdfs3 + source_files: + - cpp/submodules/parquet-testing/data + - testing/data + commands: + - cd ${SP_DIR} # [unix] + - cd %SP_DIR% # [win] + - export ARROW_TEST_DATA="${SRC_DIR}/testing/data" # [unix] + - set 
"ARROW_TEST_DATA=%SRC_DIR%\testing\data" # [win] + - export PARQUET_TEST_DATA="${SRC_DIR}/cpp/submodules/parquet-testing/data" # [unix] + - set "PARQUET_TEST_DATA=%SRC_DIR%\cpp\submodules\parquet-testing\data" # [win] + + {% set tests_to_skip = "_not_a_real_test" %} + # we do not have GPUs in CI --> cannot test cuda + {% set tests_to_skip = tests_to_skip + " or test_cuda" + " or test_dlpack_cuda_not_supported"%} + # skip tests that raise SIGINT and crash the test suite + {% set tests_to_skip = tests_to_skip + " or (test_csv and test_cancellation)" %} # [linux] + {% set tests_to_skip = tests_to_skip + " or (test_flight and test_interrupt)" %} # [linux] + # skip tests that make invalid(-for-conda) assumptions about the compilers setup + {% set tests_to_skip = tests_to_skip + " or test_cython_api" %} # [unix] + {% set tests_to_skip = tests_to_skip + " or test_visit_strings" %} # [unix] + # skip tests that cannot succeed in emulation + {% set tests_to_skip = tests_to_skip + " or test_debug_memory_pool_disabled" %} # [aarch64 or ppc64le] + {% set tests_to_skip = tests_to_skip + " or test_env_var_io_thread_count" %} # [aarch64 or ppc64le] + # vvvvvvv TESTS THAT SHOULDN'T HAVE TO BE SKIPPED vvvvvvv + # https://github.com/apache/arrow/issues/43800 + {% set tests_to_skip = tests_to_skip + " or test_cpp_extension_in_python" %} # [osx] + # https://github.com/apache/arrow/issues/43356 + {% set tests_to_skip = tests_to_skip + " or (test_compute and test_assume_timezone)" %} # [aarch64 or ppc64le] + {% set tests_to_skip = tests_to_skip + " or (test_compute and test_strftime)" %} # [aarch64 or ppc64le] + {% set tests_to_skip = tests_to_skip + " or (test_compute and test_round_temporal)" %} # [aarch64 or ppc64le] + {% set tests_to_skip = tests_to_skip + " or test_extract_datetime_components " %} # [aarch64 or ppc64le] + # flaky test that fails regularly on aarch + {% set tests_to_skip = tests_to_skip + " or test_feather_format[serial]" %} # [aarch64] + # gandiva tests are segfaulting on ppc + {% set tests_to_skip = tests_to_skip + " or test_gandiva" %} # [ppc64le] + # ^^^^^^^ TESTS THAT SHOULDN'T HAVE TO BE SKIPPED ^^^^^^^ + - pytest pyarrow/ -rfEs -k "not ({{ tests_to_skip }})" + {% endif %} + + about: + home: http://github.com/apache/arrow + license: Apache-2.0 + license_file: + - LICENSE.txt + summary: Python test files for Apache Arrow + about: home: http://github.com/apache/arrow license: Apache-2.0 diff --git a/recipe/test_read_parquet.py b/recipe/test_read_parquet.py new file mode 100644 index 000000000..5f76a4e22 --- /dev/null +++ b/recipe/test_read_parquet.py @@ -0,0 +1,5 @@ +import pyarrow as pa +import pyarrow.parquet as pq + +table = pa.Table.from_pydict({"a": [1, 2]}) +pq.write_table(table, "test.parquet")