-
-
Notifications
You must be signed in to change notification settings - Fork 13.7k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Connor Baker
committed
Feb 26, 2023
1 parent
a0c8931
commit 07f8db5
Showing
3 changed files
with
178 additions
and
0 deletions.
There are no files selected for viewing
171 changes: 171 additions & 0 deletions
171
pkgs/development/libraries/science/math/tiny-cuda-nn/default.nix
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,171 @@ | ||
{
  stdenv,
  cmake,
  fetchFromGitHub,
  lib,
  ninja,
  symlinkJoin,
  which,
  pythonPackages,
  pythonSupport ? false,
}:
# The python bindings link against torch's CUDA runtime, so a CUDA-enabled
# torch is a hard requirement whenever this derivation is evaluated.
assert lib.asserts.assertMsg
(pythonPackages.torch.cudaSupport)
"tiny-cuda-nn requires torch to be built with cudaSupport"; let
  inherit (lib) lists strings;
  inherit (pythonPackages.torch) cudaPackages cudaSupport;
  inherit (cudaPackages) cudatoolkit cudaFlags;

  # A single joined tree of the CUDA redistributable components the build
  # needs, usable as CUDA_HOME in preConfigure below.
  cuda-redist = symlinkJoin {
    name = "cuda-redist";
    paths = with cudaPackages; [
      cuda_cudart
      cuda_nvcc
      libcublas
      libcusparse
      libcusolver
    ];
  };

  # TCNN_CUDA_ARCHITECTURES wants a semicolon-separated list of dotless
  # compute capabilities (e.g. "80;86"). Drop "...PTX" entries (not real
  # architectures), strip the dots, and deduplicate.
  dotlessSemicolonCudaCapabilities = let
    dropPTX = builtins.filter (c: ! strings.hasSuffix "PTX" c) cudaFlags.cudaCapabilities;
    dropDot = lists.map (strings.replaceStrings ["."] [""]) dropPTX;
  in
    strings.concatStringsSep ";" (lists.unique dropDot);

  # Build options shared by both build modes: passed to CMake for the C++
  # static library, or exported as environment variables for the setuptools
  # build of the python bindings (see configurePhase below).
  flags = [
    "TCNN_BUILD_BENCHMARK=OFF"
    "TCNN_BUILD_EXAMPLES=OFF"
    "TCNN_ALLOW_CUBLAS_CUSOLVER=ON"
    "TCNN_CUDA_ARCHITECTURES=${dotlessSemicolonCudaCapabilities}"
  ];
in
  stdenv.mkDerivation (finalAttrs: {
    pname = "tiny-cuda-nn";
    version = "1.6";

    # Only meaningful when building the python bindings; harmless (empty
    # string) otherwise.
    format = strings.optionalString pythonSupport "setuptools";

    src = fetchFromGitHub {
      owner = "NVlabs";
      repo = finalAttrs.pname;
      rev = "v${finalAttrs.version}";
      fetchSubmodules = true;
      hash = "sha256-qW6Fk2GB71fvZSsfu+mykabSxEKvaikZ/pQQZUycOy0=";
    };

    nativeBuildInputs =
      [
        which
        cmake
        ninja
      ]
      ++ lists.optionals pythonSupport (with pythonPackages; [
        pip
        wheel
        setuptools
      ]);

    buildInputs =
      [
        cuda-redist
      ]
      ++ lib.optionals pythonSupport (
        with pythonPackages; [
          python
          pybind11
        ]
      );

    # TODO: OpenCV, which also provides python bindings and supports CUDA, adds the cuda
    # redistributable to propagatedBuildInputs. Should we do this here as well?
    propagatedBuildInputs = lib.optionals pythonSupport (
      with pythonPackages; [
        torch
      ]
    );

    # NOTE: We cannot use pythonImportsCheck for this module because it uses torch to immediately
    # initialize CUDA. We cannot assume that at the time we run the check phase, the user has an
    # NVIDIA GPU available.
    # There are no tests for the C++ library or the python bindings, so we just skip the check
    # phase.
    doCheck = false;

    # Point both CMake and setuptools at the joined CUDA tree and at the
    # compiler matching the CUDA toolkit (nvcc is picky about host compilers).
    preConfigure = ''
      export CUDA_HOME=${cuda-redist}
      export LIBRARY_PATH=${cuda-redist}/lib/stubs:$LIBRARY_PATH
      export CC=${cudatoolkit.cc}/bin/cc
      export CXX=${cudatoolkit.cc}/bin/c++
      export CUDAHOSTCXX=${cudatoolkit.cc}/bin/c++
    '';

    # When building the python bindings, we cannot re-use the artifacts from the C++ build so we
    # skip the CMake configurePhase and the buildPhase.
    dontUseCmakeConfigure = pythonSupport;
    cmakeFlags = lists.map (x: "-D${x}") flags;

    # The configurePhase usually puts you in the build directory, so for the python bindings we
    # need to change directories to the source directory.
    configurePhase = strings.optionalString pythonSupport ''
      runHook preConfigure
      mkdir -p $NIX_BUILD_TOP/build
      cd $NIX_BUILD_TOP/build
      ${strings.concatStringsSep "\n" (lists.map (x: "export ${x}") flags)}
      runHook postConfigure
    '';

    buildPhase = strings.optionalString pythonSupport ''
      runHook preBuild
      python -m pip wheel \
        --no-build-isolation \
        --no-clean \
        --no-deps \
        --no-index \
        --verbose \
        --wheel-dir $NIX_BUILD_TOP/build \
        $NIX_BUILD_TOP/source/bindings/torch
      runHook postBuild
    '';

    installPhase =
      ''
        runHook preInstall
        mkdir -p $out/lib
      ''
      # Installing the C++ library just requires copying the static library to the output directory
      + strings.optionalString (!pythonSupport) ''
        cp libtiny-cuda-nn.a $out/lib/
      ''
      # Installing the python bindings requires building the wheel and installing it
      + strings.optionalString pythonSupport ''
        python -m pip install \
          --no-build-isolation \
          --no-cache-dir \
          --no-deps \
          --no-index \
          --no-warn-script-location \
          --prefix="$out" \
          --verbose \
          ./*.whl
      ''
      + ''
        runHook postInstall
      '';

    # Expose the CUDA package set this was built against so downstream users
    # can stay consistent with it.
    passthru = {
      inherit cudaPackages;
    };

    meta = with lib; {
      description = "Lightning fast C++/CUDA neural network framework";
      homepage = "https://github.com/NVlabs/tiny-cuda-nn";
      license = licenses.bsd3;
      maintainers = with maintainers; [];
      platforms = platforms.linux;
      broken = !cudaSupport;
    };
  })
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters