2022-09-30 11:47:45 +00:00
|
|
|
{ lib
, fetchFromGitHub
, buildPythonPackage
, substituteAll
, cudaSupport ? false

  # runtime
, ffmpeg-headless

  # propagates
, numpy
, torch
, torchWithCuda
, tqdm
, more-itertools
, transformers
, numba
, openai-triton
, scipy
, tiktoken

  # tests
, pytestCheckHook
}:

buildPythonPackage rec {
  pname = "whisper";
  version = "20230918";
  format = "setuptools";

  src = fetchFromGitHub {
    owner = "openai";
    repo = pname;
    rev = "refs/tags/v${version}";
    hash = "sha256-wBAanFVEIIzTcoX40P9eI26UdEu0SC/xuife/zi2Xho=";
  };

  patches = [
    # Substitute an absolute path to ffmpeg into the source so whisper does
    # not depend on ffmpeg being available in PATH at runtime.
    (substituteAll {
      src = ./ffmpeg-path.patch;
      ffmpeg = ffmpeg-headless;
    })
  ];

  propagatedBuildInputs = [
    numpy
    tqdm
    more-itertools
    transformers
    numba
    scipy
    tiktoken
  ] ++ lib.optionals (!cudaSupport) [
    torch
  ] ++ lib.optionals (cudaSupport) [
    openai-triton
    torchWithCuda
  ];

  postPatch = ''
    # Allow newer tiktoken releases than the exact pin upstream requires.
    substituteInPlace requirements.txt \
      --replace "tiktoken==0.3.3" "tiktoken>=0.3.3"
  ''
  # openai-triton is only needed for CUDA support.
  # triton needs CUDA to be built.
  # -> by making it optional, we can build whisper without unfree packages enabled
  + lib.optionalString (!cudaSupport) ''
    sed -i '/if sys.platform.startswith("linux") and platform.machine() == "x86_64":/{N;d}' setup.py
  '';

  preCheck = ''
    export HOME=$TMPDIR
  '';

  nativeCheckInputs = [
    pytestCheckHook
  ];

  disabledTests = [
    # requires network access to download models
    "test_transcribe"
    # requires NVIDIA drivers
    "test_dtw_cuda_equivalence"
    "test_median_filter_equivalence"
  ];

  meta = with lib; {
    changelog = "https://github.com/openai/whisper/blob/v${version}/CHANGELOG.md";
    description = "General-purpose speech recognition model";
    homepage = "https://github.com/openai/whisper";
    license = licenses.mit;
    maintainers = with maintainers; [ hexa MayNiklas ];
  };
}
|