diff options
author | Alexander Tsvyashchenko <ndl@endl.ch> | 2021-12-28 01:19:10 +0100 |
---|---|---|
committer | GitHub <noreply@github.com> | 2021-12-27 16:19:10 -0800 |
commit | be5272250926e352427b3c62c6066a95c6592375 (patch) | |
tree | 826d9be930dc2c701209d84eb6abbda59cff853c /pkgs/development/python-modules/jaxlib/bin.nix | |
parent | 8efd318b108e44673cfcb0643ddd1fd224e25dc1 (diff) | |
download | nixpkgs-be5272250926e352427b3c62c6066a95c6592375.tar nixpkgs-be5272250926e352427b3c62c6066a95c6592375.tar.gz nixpkgs-be5272250926e352427b3c62c6066a95c6592375.tar.bz2 nixpkgs-be5272250926e352427b3c62c6066a95c6592375.tar.lz nixpkgs-be5272250926e352427b3c62c6066a95c6592375.tar.xz nixpkgs-be5272250926e352427b3c62c6066a95c6592375.tar.zst nixpkgs-be5272250926e352427b3c62c6066a95c6592375.zip |
python3Packages.jaxlib: refactor to support Nix-based builds (#151909)
* python3Packages.jaxlib: rename to `jaxlib-bin`

  Refactoring `jaxlib` to have a similar structure to `tensorflow` with the 'bin' and 'build' options.

* python3Packages.jaxlib: init the 'build' variant at 0.1.75

  Similar to `tensorflow-build`, now there's an option to build `jaxlib` using Nix-provided environment and dependencies.

* python3Packages.jax: 0.2.24 -> 0.2.26
* Addressed review comments.
* Fixed `cudaSupport` missing property on some arches.
* Unified the versions of CUDA-related packages with TF.

Co-authored-by: Samuel Ainsworth <skainsworth@gmail.com>
Diffstat (limited to 'pkgs/development/python-modules/jaxlib/bin.nix')
-rw-r--r-- | pkgs/development/python-modules/jaxlib/bin.nix | 90 |
1 file changed, 90 insertions, 0 deletions
# For the moment we only support the CPU and GPU backends of jaxlib. The TPU
# backend will require some additional work. Those wheels are located here:
# https://storage.googleapis.com/jax-releases/libtpu_releases.html.

# For future reference, the easiest way to test the GPU backend is to run
#   NIX_PATH=.. nix-shell -p python3 python3Packages.jax "python3Packages.jaxlib.override { cudaSupport = true; }"
#   export XLA_FLAGS=--xla_gpu_force_compilation_parallelism=1
#   python -c "from jax.lib import xla_bridge; assert xla_bridge.get_backend().platform == 'gpu'"
#   python -c "from jax import random; random.PRNGKey(0)"
#   python -c "from jax import random; x = random.normal(random.PRNGKey(0), (100, 100)); x @ x"
# There's no convenient way to test the GPU backend in the derivation since the
# nix build environment blocks access to the GPU. See also:
#   * https://github.com/google/jax/issues/971#issuecomment-508216439
#   * https://github.com/google/jax/issues/5723#issuecomment-913038780

{ addOpenGLRunpath, autoPatchelfHook, buildPythonPackage, config
, fetchurl, isPy39, lib, stdenv
# propagatedBuildInputs
, absl-py, flatbuffers, scipy, cudatoolkit_11
# Options:
, cudaSupport ? config.cudaSupport or false
}:

# The GPU wheel below is built against CUDA 11.1; refuse older toolkits early.
assert cudaSupport -> lib.versionAtLeast cudatoolkit_11.version "11.1";

let
  # Selects which prebuilt wheel to fetch.
  device = if cudaSupport then "gpu" else "cpu";
in
buildPythonPackage rec {
  pname = "jaxlib";
  version = "0.1.71";
  format = "wheel";

  # At the time of writing (8/19/21), there are releases for 3.7-3.9. Supporting
  # all of them is a pain, so we focus on 3.9, the current nixpkgs python3
  # version.
  disabled = !isPy39;

  src = {
    cpu = fetchurl {
      url = "https://storage.googleapis.com/jax-releases/nocuda/jaxlib-${version}-cp39-none-manylinux2010_x86_64.whl";
      sha256 = "sha256:0rqhs6qabydizlv5d3rb20dbv6612rr7dqfniy9r6h4kazdinsn6";
    };
    gpu = fetchurl {
      url = "https://storage.googleapis.com/jax-releases/cuda111/jaxlib-${version}+cuda111-cp39-none-manylinux2010_x86_64.whl";
      sha256 = "sha256:065kyzjsk9m84d138p99iymdiiicm1qz8a3iwxz8rspl43rwrw89";
    };
  }.${device};

  # Prebuilt wheels are dynamically linked against things that nix can't find.
  # Run `autoPatchelfHook` to automagically fix them.
  nativeBuildInputs = [ autoPatchelfHook ] ++ lib.optional cudaSupport addOpenGLRunpath;
  # Dynamic link dependencies
  buildInputs = [ stdenv.cc.cc ];

  # jaxlib contains shared libraries that open other shared libraries via dlopen
  # and these implicit dependencies are not recognized by ldd or
  # autoPatchelfHook. That means we need to sneak them into rpath. This step
  # must be done after autoPatchelfHook and the automatic stripping of
  # artifacts. autoPatchelfHook runs in postFixup and auto-stripping runs in the
  # patchPhase. Dependencies:
  #   * libcudart.so.11.0 -> cudatoolkit_11.lib
  #   * libcublas.so.11   -> cudatoolkit_11
  #   * libcuda.so.1      -> opengl driver in /run/opengl-driver/lib
  #
  # NOTE: this attribute must be a *string* (a shell snippet); the original
  # used `lib.optional` here, which produces a one-element list that only
  # worked via accidental list-to-string coercion. `lib.optionalString` is the
  # correct combinator: it returns the string when the condition holds and ""
  # otherwise.
  preInstallCheck = lib.optionalString cudaSupport ''
    shopt -s globstar

    addOpenGLRunpath $out/**/*.so

    for file in $out/**/*.so; do
      rpath=$(patchelf --print-rpath $file)
      # For some reason `makeLibraryPath` on `cudatoolkit_11` maps to
      # <cudatoolkit_11.lib>/lib which is different from <cudatoolkit_11>/lib.
      patchelf --set-rpath "$rpath:${cudatoolkit_11}/lib:${lib.makeLibraryPath [ cudatoolkit_11.lib ]}" $file
    done
  '';

  # pip dependencies and optionally cudatoolkit.
  propagatedBuildInputs = [ absl-py flatbuffers scipy ] ++ lib.optional cudaSupport cudatoolkit_11;

  pythonImportsCheck = [ "jaxlib" ];

  meta = with lib; {
    description = "XLA library for JAX";
    homepage = "https://github.com/google/jax";
    license = licenses.asl20;
    maintainers = with maintainers; [ samuela ];
    platforms = [ "x86_64-linux" ];
  };
}