diff options
author | Martin Weinelt <hexa@darmstadt.ccc.de> | 2023-11-17 22:12:44 +0100 |
---|---|---|
committer | Martin Weinelt <hexa@darmstadt.ccc.de> | 2023-11-18 12:33:31 +0100 |
commit | 0aeec365caf1496ecdb86b63e916d7d508e3431e (patch) | |
tree | 5b9bcfdcd853ff3664e41bddc44e454365ad2d6e | |
parent | 19e27c3547b51e8705855879a4f55846c75ee5fb (diff) | |
download | nixpkgs-0aeec365caf1496ecdb86b63e916d7d508e3431e.tar nixpkgs-0aeec365caf1496ecdb86b63e916d7d508e3431e.tar.gz nixpkgs-0aeec365caf1496ecdb86b63e916d7d508e3431e.tar.bz2 nixpkgs-0aeec365caf1496ecdb86b63e916d7d508e3431e.tar.lz nixpkgs-0aeec365caf1496ecdb86b63e916d7d508e3431e.tar.xz nixpkgs-0aeec365caf1496ecdb86b63e916d7d508e3431e.tar.zst nixpkgs-0aeec365caf1496ecdb86b63e916d7d508e3431e.zip |
openai-whisper: 20230918 -> 20231117
Changelog: https://github.com/openai/whisper/blob/v20231117/CHANGELOG.md

Always propagate openai-triton, since whether or not it is free is decided by `nixpkgs.config.cudaSupport`. This allows us to drop the local `cudaSupport` flags in favor of the nixpkgs-wide one.
-rw-r--r-- | pkgs/development/python-modules/openai-whisper/default.nix | 47 | ||||
-rw-r--r-- | pkgs/top-level/python-packages.nix | 5 |
2 files changed, 20 insertions, 32 deletions
diff --git a/pkgs/development/python-modules/openai-whisper/default.nix b/pkgs/development/python-modules/openai-whisper/default.nix index 68f692e4c37..7983abd2e98 100644 --- a/pkgs/development/python-modules/openai-whisper/default.nix +++ b/pkgs/development/python-modules/openai-whisper/default.nix @@ -2,22 +2,23 @@ , fetchFromGitHub , buildPythonPackage , substituteAll -, cudaSupport ? false + +# build-system +, setuptools # runtime , ffmpeg-headless # propagates -, numpy -, torch -, torchWithCuda -, tqdm , more-itertools -, transformers , numba +, numpy , openai-triton , scipy , tiktoken +, torch +, tqdm +, transformers # tests , pytestCheckHook @@ -25,14 +26,14 @@ buildPythonPackage rec { pname = "whisper"; - version = "20230918"; - format = "setuptools"; + version = "20231117"; + pyproject = true; src = fetchFromGitHub { owner = "openai"; repo = pname; rev = "refs/tags/v${version}"; - hash = "sha256-wBAanFVEIIzTcoX40P9eI26UdEu0SC/xuife/zi2Xho="; + hash = "sha256-MJ1XjB/GuYUiECCuuHS0NWHvvs+ko0oTvLuDI7zLNiY="; }; patches = [ @@ -42,32 +43,22 @@ buildPythonPackage rec { }) ]; + nativeBuildInputs = [ + setuptools + ]; + propagatedBuildInputs = [ - numpy - tqdm more-itertools - transformers numba + numpy + openai-triton scipy tiktoken - ] ++ lib.optionals (!cudaSupport) [ torch - ] ++ lib.optionals (cudaSupport) [ - openai-triton - torchWithCuda + tqdm + transformers ]; - postPatch = '' - substituteInPlace requirements.txt \ - --replace "tiktoken==0.3.3" "tiktoken>=0.3.3" - '' - # openai-triton is only needed for CUDA support. - # triton needs CUDA to be build. 
- # -> by making it optional, we can build whisper without unfree packages enabled - + lib.optionalString (!cudaSupport) '' - sed -i '/if sys.platform.startswith("linux") and platform.machine() == "x86_64":/{N;d}' setup.py - ''; - preCheck = '' export HOME=$TMPDIR ''; @@ -85,7 +76,7 @@ buildPythonPackage rec { ]; meta = with lib; { - changelog = "https://github.com/openai/whisper/blob/v$[version}/CHANGELOG.md"; + changelog = "https://github.com/openai/whisper/blob/v${version}/CHANGELOG.md"; description = "General-purpose speech recognition model"; homepage = "https://github.com/openai/whisper"; license = licenses.mit; diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix index 0a2f0fea462..e0d93436ace 100644 --- a/pkgs/top-level/python-packages.nix +++ b/pkgs/top-level/python-packages.nix @@ -8456,10 +8456,7 @@ self: super: with self; { openai-triton-bin = callPackage ../development/python-modules/openai-triton/bin.nix { }; - openai-whisper = callPackage ../development/python-modules/openai-whisper { - inherit (pkgs.config) cudaSupport; - openai-triton = self.openai-triton-cuda; - }; + openai-whisper = callPackage ../development/python-modules/openai-whisper { }; openant = callPackage ../development/python-modules/openant { }; |