depot/third_party/nixpkgs/pkgs/development/python-modules/llama-index-embeddings-gemini/default.nix

{
  lib,
  buildPythonPackage,
  fetchPypi,
  google-generativeai,
  llama-index-core,
  poetry-core,
  pytestCheckHook,
  pythonRelaxDepsHook,
  pythonOlder,
}:
buildPythonPackage rec {
  pname = "llama-index-embeddings-gemini";
  version = "0.1.8";
  pyproject = true;
  disabled = pythonOlder "3.9";
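  # fetchPypi needs the underscore-normalized name the sdist is published under.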
  src = fetchPypi {
    pname = "llama_index_embeddings_gemini";
    inherit version;
    hash = "sha256-rQKyPqZnyVYH2h5TTVV53kYYldGURWARWjYBrBb4d5M=";
  };
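  # Relax the upstream version bound on google-generativeai so the build
  # accepts the version packaged in nixpkgs.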
  pythonRelaxDeps = [ "google-generativeai" ];
  build-system = [ poetry-core ];
  nativeBuildInputs = [ pythonRelaxDepsHook ];
  dependencies = [
    google-generativeai
    llama-index-core
  ];
  # Tests are only available in the mono repo
  doCheck = false;
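  # With the test suite disabled, the import check is the only build-time sanity check.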
  pythonImportsCheck = [ "llama_index.embeddings.gemini" ];
  meta = with lib; {
    description = "LlamaIndex Embeddings Integration for Gemini";
    homepage = "https://github.com/run-llama/llama_index/tree/main/llama-index-integrations/embeddings/llama-index-embeddings-gemini";
    license = licenses.mit;
    maintainers = with maintainers; [ fab ];
  };
}