# Nixpkgs package expression for Scrapy, a Python web-crawling/scraping
# framework. Builds from the PyPI sdist and runs the upstream pytest suite
# (minus tests that need network access or are known-flaky in the sandbox).
#
# NOTE(review): the checked-in file had been corrupted with `git blame`
# annotation residue (timestamp and `|` lines); this reconstructs the valid
# Nix expression without changing any attribute or value.
{ lib
, stdenv
, botocore
, buildPythonPackage
, cryptography
, cssselect
, fetchPypi
, glibcLocales
, installShellFiles
, itemadapter
, itemloaders
, jmespath
, lxml
, packaging
, parsel
, pexpect
, protego
, pydispatcher
, pyopenssl
, pytestCheckHook
, pythonOlder
, queuelib
, service-identity
, setuptools
, sybil
, testfixtures
, tldextract
, twisted
, w3lib
, zope-interface
}:

buildPythonPackage rec {
  pname = "scrapy";
  version = "2.11.1";
  # Build via PEP 517 (pyproject.toml) rather than legacy setup.py install.
  pyproject = true;

  disabled = pythonOlder "3.8";

  src = fetchPypi {
    inherit version;
    # Upstream publishes the sdist under the capitalized name "Scrapy".
    pname = "Scrapy";
    hash = "sha256-czoDnHQj5StpvygQtTMgk9TkKoSEYDWcB7Auz/j3Pr4=";
  };

  nativeBuildInputs = [
    installShellFiles
    setuptools
  ];

  # Runtime Python dependencies.
  propagatedBuildInputs = [
    cryptography
    cssselect
    itemadapter
    itemloaders
    lxml
    packaging
    parsel
    protego
    pydispatcher
    pyopenssl
    queuelib
    service-identity
    tldextract
    twisted
    w3lib
    zope-interface
  ];

  # Extra dependencies needed only by the test suite.
  nativeCheckInputs = [
    botocore
    glibcLocales
    jmespath
    pexpect
    pytestCheckHook
    sybil
    testfixtures
  ];

  # Several tests assume a UTF-8 locale.
  LC_ALL = "en_US.UTF-8";

  disabledTestPaths = [
    "tests/test_proxy_connect.py"
    "tests/test_utils_display.py"
    "tests/test_command_check.py"
    # Don't test the documentation
    "docs"
  ];

  disabledTests = [
    # It's unclear if the failures are related to libxml2, https://github.com/NixOS/nixpkgs/pull/123890
    "test_nested_css"
    "test_nested_xpath"
    "test_flavor_detection"
    "test_follow_whitespace"
    # Requires network access
    "AnonymousFTPTestCase"
    "FTPFeedStorageTest"
    "FeedExportTest"
    "test_custom_asyncio_loop_enabled_true"
    "test_custom_loop_asyncio"
    "test_custom_loop_asyncio_deferred_signal"
    "FileFeedStoragePreFeedOptionsTest" # https://github.com/scrapy/scrapy/issues/5157
    "test_persist"
    "test_timeout_download_from_spider_nodata_rcvd"
    "test_timeout_download_from_spider_server_hangs"
    "test_unbounded_response"
    "CookiesMiddlewareTest"
    # Depends on uvloop
    "test_asyncio_enabled_reactor_different_loop"
    "test_asyncio_enabled_reactor_same_loop"
    # Fails with AssertionError
    "test_peek_fifo"
    "test_peek_one_element"
    "test_peek_lifo"
    "test_callback_kwargs"
    # Test fails on Hydra
    "test_start_requests_laziness"
  ] ++ lib.optionals stdenv.isDarwin [
    "test_xmliter_encoding"
    "test_download"
    "test_reactor_default_twisted_reactor_select"
    "URIParamsSettingTest"
    "URIParamsFeedOptionTest"
    # flaky on darwin-aarch64
    "test_fixed_delay"
    "test_start_requests_laziness"
  ];

  # Install the man page and shell completions shipped in the sdist's
  # extras/ directory.
  postInstall = ''
    installManPage extras/scrapy.1
    installShellCompletion --cmd scrapy \
      --zsh extras/scrapy_zsh_completion \
      --bash extras/scrapy_bash_completion
  '';

  pythonImportsCheck = [
    "scrapy"
  ];

  # Some tests bind local sockets; allow that inside the Darwin sandbox.
  __darwinAllowLocalNetworking = true;

  meta = with lib; {
    description = "High-level web crawling and web scraping framework";
    mainProgram = "scrapy";
    longDescription = ''
      Scrapy is a fast high-level web crawling and web scraping framework, used to crawl
      websites and extract structured data from their pages. It can be used for a wide
      range of purposes, from data mining to monitoring and automated testing.
    '';
    homepage = "https://scrapy.org/";
    changelog = "https://github.com/scrapy/scrapy/raw/${version}/docs/news.rst";
    license = licenses.bsd3;
    maintainers = with maintainers; [ marsam ];
  };
}