{ lib
, buildPythonPackage
, fetchFromGitHub
, pythonOlder
, coloredlogs
, datasets
, evaluate
, h5py
, huggingface-hub
, numpy
, onnx
, onnxruntime
, packaging
, protobuf
, sympy
, tensorflow
, tf2onnx
, timm
, torch
, transformers
}:

buildPythonPackage rec {
  pname = "optimum";
  version = "1.18.1";
  format = "setuptools";

  disabled = pythonOlder "3.7";

  src = fetchFromGitHub {
    owner = "huggingface";
    repo = "optimum";
    rev = "refs/tags/v${version}";
    hash = "sha256-Y+KWvpd/ULthCOr18hQjP0REQPcs2Ql2aUi3DIKIOpQ=";
  };

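  # Upstream's install_requires appears to include transformers[sentencepiece],
  # hence the sentencepiece extra appended to propagatedBuildInputs below.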
  propagatedBuildInputs = [
    coloredlogs
    datasets
    huggingface-hub
    numpy
    packaging
    sympy
    torch
    transformers
  ] ++ transformers.optional-dependencies.sentencepiece;

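  # The extras below mirror upstream's setup.py. Entries are left commented out
  # where the required package (e.g. optimum-intel, optimum-graphcore) does not
  # appear to be packaged in nixpkgs. Consumers can enable an available extra with e.g.
  #   python3.withPackages (ps: [ ps.optimum ] ++ ps.optimum.optional-dependencies.onnxruntime)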
  passthru.optional-dependencies = {
    onnxruntime = [
      onnx
      onnxruntime
      datasets
      evaluate
      protobuf
    ];
    exporters = [
      onnx
      onnxruntime
      timm
    ];
    exporters-tf = [
      tensorflow
      tf2onnx
      onnx
      onnxruntime
      timm
      h5py
      numpy
    ];
    diffusers = [
      # diffusers
    ];
    intel = [
      # optimum-intel
    ];
    openvino = [
      # optimum-intel
    ]; # ++ optimum-intel.optional-dependencies.openvino;
    nncf = [
      # optimum-intel
    ]; # ++ optimum-intel.optional-dependencies.nncf;
    neural-compressor = [
      # optimum-intel
    ]; # ++ optimum-intel.optional-dependencies.neural-compressor;
    graphcore = [
      # optimum-graphcore
    ];
    habana = [
      transformers
      # optimum-habana
    ];
    neuron = [
      # optimum-neuron
    ]; # ++ optimum-neuron.optional-dependencies.neuron;
    neuronx = [
      # optimum-neuron
    ]; # ++ optimum-neuron.optional-dependencies.neuronx;
    furiosa = [
      # optimum-furiosa
    ];
  };

  # almost all tests try to connect to https://huggingface.co
  doCheck = false;

  pythonImportsCheck = [ "optimum" ];

  meta = with lib; {
    description = "Accelerate training and inference of 🤗 Transformers and 🤗 Diffusers with easy to use hardware optimization tools";
    mainProgram = "optimum-cli";
    homepage = "https://github.com/huggingface/optimum";
    changelog = "https://github.com/huggingface/optimum/releases/tag/${src.rev}";
    license = licenses.asl20;
    maintainers = with maintainers; [ natsukium ];
  };
}