depot/third_party/nixpkgs/pkgs/development/python-modules/llama-index-embeddings-gemini/default.nix

{
  lib,
  buildPythonPackage,
  fetchPypi,
  google-generativeai,
  llama-index-core,
  poetry-core,
  pytestCheckHook,
  pythonRelaxDepsHook,
  pythonOlder,
}:

buildPythonPackage rec {
  pname = "llama-index-embeddings-gemini";
  version = "0.1.6";
  pyproject = true;

  disabled = pythonOlder "3.9";

  src = fetchPypi {
    pname = "llama_index_embeddings_gemini";
    inherit version;
    hash = "sha256-HYwYA67/7gDxE7ZxQkkyblgwE83gZXuDmUuseXujr5g=";
  };
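
  # Relax the google-generativeai constraint, presumably because upstream pins
  # it more tightly than the version available in nixpkgs.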
  pythonRelaxDeps = [ "google-generativeai" ];

  build-system = [ poetry-core ];

  nativeBuildInputs = [ pythonRelaxDepsHook ];

  dependencies = [
    google-generativeai
    llama-index-core
  ];
  # Tests are only available in the mono repo
  doCheck = false;
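
  # Since the test suite is skipped, at least verify that the module imports.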
  pythonImportsCheck = [ "llama_index.embeddings.gemini" ];

  meta = with lib; {
    description = "LlamaIndex Embeddings Integration for Gemini";
    homepage = "https://github.com/run-llama/llama_index/tree/main/llama-index-integrations/embeddings/llama-index-embeddings-gemini";
    license = licenses.mit;
    maintainers = with maintainers; [ fab ];
  };
}
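
A minimal usage sketch, not part of the packaged file above: assuming this derivation is exposed in the standard python3Packages set under its pname (the usual nixpkgs convention), a development environment containing it could be assembled roughly like this hypothetical shell.nix:

  # Hypothetical shell.nix; the attribute names assume this package and
  # llama-index-core are available in python3Packages.
  with import <nixpkgs> { };

  mkShell {
    packages = [
      (python3.withPackages (ps: [
        ps.llama-index-core
        ps.llama-index-embeddings-gemini
      ]))
    ];
  }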