python311Packages.torch: choose magma at the expression level
...instead of at the callPackage site. Addresses https://github.com/NixOS/nixpkgs/issues/268736
commit 395b7cc35b (parent 98405dd2bd)
4 changed files with 26 additions and 21 deletions
@@ -589,6 +589,9 @@ The module update takes care of the new config syntax and the data itself (user

 - `python3.pkgs.flitBuildHook` has been removed. Use `flit-core` and `format = "pyproject"` instead.

+- Now `magma` defaults to `magma-hip` instead of `magma-cuda`. It also
+  respects the `config.cudaSupport` and `config.rocmSupport` options.
+
 - The `extend` function of `llvmPackages` has been removed due it coming from the `tools` attrset thus only extending the `tool` attrset. A possible replacement is to construct the set from `libraries` and `tools`, or patch nixpkgs.

 - The `qemu-vm.nix` module now supports disabling overriding `fileSystems` with
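Since `magma` now follows the nixpkgs config, a brief sketch of how a user might still get a CUDA-enabled magma; the NixOS option path and override arguments here are assumptions for illustration, not taken from this commit:

# Globally, via the config that the new `cudaSupport ? config.cudaSupport` default reads:
{ nixpkgs.config.cudaSupport = true; }

# Or per package, by overriding the flags introduced in the magma expression below:
# pkgs.magma.override { cudaSupport = true; rocmSupport = false; }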
@@ -8,12 +8,7 @@
 { blas
 , cmake
 , cudaPackages
-# FIXME: cuda being unfree means ofborg won't eval "magma".
-# respecting config.cudaSupport -> false by default
-# -> ofborg eval -> throws "no GPU targets specified".
-# Probably should delete everything but "magma-cuda" and "magma-hip"
-# from all-packages.nix
-, cudaSupport ? true
+, cudaSupport ? config.cudaSupport
 , fetchurl
 , gfortran
 , cudaCapabilities ? cudaPackages.cudaFlags.cudaCapabilities
@@ -25,7 +20,9 @@
 , magmaRelease
 , ninja
 , config
-, rocmSupport ? config.rocmSupport
+# At least one back-end has to be enabled,
+# and we can't default to CUDA since it's unfree
+, rocmSupport ? !cudaSupport
 , static ? false
 , stdenv
 , symlinkJoin
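For concreteness, a minimal sketch (not part of the commit) of how the two new argument defaults resolve when the user sets nothing, which is why `magma-hip` becomes the default build; it can be checked with `nix-instantiate --eval --strict`:

let
  config = { cudaSupport = false; };      # stock nixpkgs config: CUDA off (unfree)
  cudaSupport = config.cudaSupport;       # mirrors `, cudaSupport ? config.cudaSupport`
  rocmSupport = !cudaSupport;             # mirrors `, rocmSupport ? !cudaSupport`
in { inherit cudaSupport rocmSupport; }   # => { cudaSupport = false; rocmSupport = true; }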
@@ -133,6 +130,8 @@ stdenv.mkDerivation {

   cmakeFlags = [
     "-DGPU_TARGET=${gpuTargetString}"
+    (lib.cmakeBool "MAGMA_ENABLE_CUDA" cudaSupport)
+    (lib.cmakeBool "MAGMA_ENABLE_HIP" rocmSupport)
   ] ++ lists.optionals static [
     "-DBUILD_SHARED_LIBS=OFF"
   ] ++ lists.optionals cudaSupport [
@@ -140,11 +139,9 @@ stdenv.mkDerivation {
     "-DMIN_ARCH=${minArch}" # Disarms magma's asserts
     "-DCMAKE_C_COMPILER=${backendStdenv.cc}/bin/cc"
     "-DCMAKE_CXX_COMPILER=${backendStdenv.cc}/bin/c++"
-    "-DMAGMA_ENABLE_CUDA=ON"
   ] ++ lists.optionals rocmSupport [
     "-DCMAKE_C_COMPILER=${rocmPackages.clr}/bin/hipcc"
     "-DCMAKE_CXX_COMPILER=${rocmPackages.clr}/bin/hipcc"
-    "-DMAGMA_ENABLE_HIP=ON"
   ];

   buildFlags = [
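The hard-coded `-DMAGMA_ENABLE_*=ON` flags removed above are superseded by the `lib.cmakeBool` calls added to `cmakeFlags`, so both back-ends are now set explicitly (on or off) on every build. Roughly, and assuming the usual `lib.cmakeBool` output format (not shown in the diff):

with (import <nixpkgs> { }).lib; [
  (cmakeBool "MAGMA_ENABLE_CUDA" false)  # roughly "-DMAGMA_ENABLE_CUDA:BOOL=FALSE"
  (cmakeBool "MAGMA_ENABLE_HIP" true)    # roughly "-DMAGMA_ENABLE_HIP:BOOL=TRUE"
]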
@@ -155,7 +152,7 @@ stdenv.mkDerivation {
   doCheck = false;

   passthru = {
-    inherit cudaPackages cudaSupport;
+    inherit cudaPackages cudaSupport rocmSupport gpuTargets;
   };

   meta = with lib; {
@@ -164,7 +161,11 @@ stdenv.mkDerivation {
     homepage = "http://icl.cs.utk.edu/magma/index.html";
     platforms = platforms.unix;
     maintainers = with maintainers; [ connorbaker ];
-    # CUDA and ROCm are mutually exclusive
-    broken = cudaSupport && rocmSupport || cudaSupport && strings.versionOlder cudaVersion "9";
+
+    # Cf. https://bitbucket.org/icl/magma/src/fcfe5aa61c1a4c664b36a73ebabbdbab82765e9f/CMakeLists.txt#lines-20
+    broken =
+      !(cudaSupport || rocmSupport) # At least one back-end enabled
+      || (cudaSupport && rocmSupport) # Mutually exclusive
+      || (cudaSupport && strings.versionOlder cudaVersion "9");
   };
 }
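As a sanity check on the reworked `broken` predicate, a small evaluation sketch (mine, assuming a CUDA version new enough that the version clause never fires):

# true means magma would be marked broken for that combination
map ({ cuda, rocm }: !(cuda || rocm) || (cuda && rocm)) [
  { cuda = false; rocm = false; }  # true:  no back-end enabled
  { cuda = true;  rocm = false; }  # false: CUDA only
  { cuda = false; rocm = true;  }  # false: ROCm only
  { cuda = true;  rocm = true;  }  # true:  mutually exclusive
]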
@@ -1,5 +1,12 @@
 { stdenv, lib, fetchFromGitHub, fetchpatch, buildPythonPackage, python,
-  config, cudaSupport ? config.cudaSupport, cudaPackages, magma,
+  config, cudaSupport ? config.cudaSupport, cudaPackages,
+  effectiveMagma ?
+    if cudaSupport then magma-cuda-static
+    else if rocmSupport then magma-hip
+    else magma,
+  magma,
+  magma-hip,
+  magma-cuda-static,
   useSystemNccl ? true,
   MPISupport ? false, mpi,
   buildDocs ? false,
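With `effectiveMagma` defaulting from the support flags, callers only toggle the flags and the matching magma follows. A hypothetical override along the lines of the `torchWithCuda` variant further down (illustrative, not part of the commit):

with import <nixpkgs> { config.allowUnfree = true; };
python3Packages.torch.override {
  cudaSupport = true;
  rocmSupport = false;
  # no explicit magma needed: effectiveMagma defaults to magma-cuda-static
}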
@@ -115,7 +122,7 @@ let
     "CUDA is not targeting Linux" = cudaSupport && !stdenv.isLinux;
     "Unsupported CUDA version" = cudaSupport && !(builtins.elem cudaPackages.cudaMajorVersion [ "11" "12" ]);
     "MPI cudatoolkit does not match cudaPackages.cudatoolkit" = MPISupport && cudaSupport && (mpi.cudatoolkit != cudaPackages.cudatoolkit);
-    "Magma cudaPackages does not match cudaPackages" = cudaSupport && (magma.cudaPackages != cudaPackages);
+    "Magma cudaPackages does not match cudaPackages" = cudaSupport && (effectiveMagma.cudaPackages != cudaPackages);
   };
 in buildPythonPackage rec {
   pname = "torch";
@@ -359,7 +366,7 @@ in buildPythonPackage rec {
       cuda_profiler_api.dev # <cuda_profiler_api.h>
     ])
     ++ lib.optionals rocmSupport [ rocmPackages.llvm.openmp ]
-    ++ lib.optionals (cudaSupport || rocmSupport) [ magma ]
+    ++ lib.optionals (cudaSupport || rocmSupport) [ effectiveMagma ]
     ++ lib.optionals stdenv.isLinux [ numactl ]
     ++ lib.optionals stdenv.isDarwin [ Accelerate CoreServices libobjc ];

@@ -14121,10 +14121,6 @@ self: super: with self; {
   toposort = callPackage ../development/python-modules/toposort { };

   torch = callPackage ../development/python-modules/torch {
-    magma =
-      if pkgs.config.cudaSupport
-      then pkgs.magma-cuda-static
-      else pkgs.magma;
     inherit (pkgs.darwin.apple_sdk.frameworks) Accelerate CoreServices;
     inherit (pkgs.darwin) libobjc;
   };
@@ -14134,7 +14130,6 @@ self: super: with self; {
   };

   torchWithCuda = self.torch.override {
-    magma = pkgs.magma-cuda-static;
     openai-triton = self.openai-triton-cuda;
     cudaSupport = true;
     rocmSupport = false;
@@ -14145,7 +14140,6 @@ self: super: with self; {
   };

   torchWithRocm = self.torch.override {
-    magma = pkgs.magma-hip;
     openai-triton = self.openai-triton-no-cuda;
     rocmSupport = true;
     cudaSupport = false;
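The variant overrides above no longer pin a magma. A small sketch (mine) mirroring the `effectiveMagma` default from the torch expression, showing what each variant now resolves to:

let
  pick = { cudaSupport, rocmSupport }:
    if cudaSupport then "magma-cuda-static"
    else if rocmSupport then "magma-hip"
    else "magma";
in {
  torchWithCuda = pick { cudaSupport = true;  rocmSupport = false; };  # "magma-cuda-static"
  torchWithRocm = pick { cudaSupport = false; rocmSupport = true;  };  # "magma-hip"
  torch         = pick { cudaSupport = false; rocmSupport = false; };  # "magma"
}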