159e378cbb
GitOrigin-RevId: c04d5652cfa9742b1d519688f65d1bbccea9eb7e
45 lines
999 B
Nix
45 lines
999 B
Nix
{
  lib,
  buildPythonPackage,
  fetchPypi,
  google-generativeai,
  llama-index-core,
  poetry-core,
  pythonOlder,
}:

buildPythonPackage rec {
  pname = "llama-index-embeddings-gemini";
  version = "0.2.0";
  pyproject = true;

  disabled = pythonOlder "3.9";

  # Source is fetched from PyPI; note the sdist uses underscores in its name.
  src = fetchPypi {
    pname = "llama_index_embeddings_gemini";
    inherit version;
    hash = "sha256-7ftu+Gh8Ar9+yrFkUldlAvUfVtAw+xzrmyzqr7TeQdc=";
  };

  # Upstream pins google-generativeai too tightly; relax it so the
  # nixpkgs-provided version is accepted.
  pythonRelaxDeps = [ "google-generativeai" ];

  build-system = [ poetry-core ];

  dependencies = [
    google-generativeai
    llama-index-core
  ];

  # Tests are only available in the mono repo
  doCheck = false;

  pythonImportsCheck = [ "llama_index.embeddings.gemini" ];

  meta = {
    # Fixed: previous description said "Llms" but this is the embeddings
    # integration (see pname and pythonImportsCheck above).
    description = "LlamaIndex Embeddings Integration for Gemini";
    homepage = "https://github.com/run-llama/llama_index/tree/main/llama-index-integrations/embeddings/llama-index-embeddings-gemini";
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ fab ];
  };
}