Merge pull request #5637 from Gallaecio/support-latest-openssl
2.6.3 changes and release notes
Gallaecio committed Sep 27, 2022
2 parents aecbccb + aec2d3a commit e5ed046
Showing 9 changed files with 52 additions and 33 deletions.
2 changes: 1 addition & 1 deletion docs/contributing.rst
@@ -214,7 +214,7 @@ Tests
=====

Tests are implemented using the :doc:`Twisted unit-testing framework
<twisted:core/development/policy/test-standard>`. Running tests requires
<twisted:development/test-standard>`. Running tests requires
:doc:`tox <tox:index>`.

.. _running-tests:
26 changes: 26 additions & 0 deletions docs/news.rst
@@ -3,6 +3,32 @@
Release notes
=============

.. _release-2.6.3:

Scrapy 2.6.3 (to be determined)
-------------------------------

- Added support for pyOpenSSL_ 22.1.0, removing support for SSLv3
(:issue:`5634`, :issue:`5635`, :issue:`5636`).

- Upgraded the minimum versions of the following dependencies:

- cryptography_: 2.0 → 3.3

- pyOpenSSL_: 16.2.0 → 21.0.0

- service_identity_: 16.0.0 → 18.1.0

- Twisted_: 17.9.0 → 18.9.0

- zope.interface_: 4.1.3 → 5.0.0

(:issue:`5621`, :issue:`5632`)

- Fixed test and documentation issues (:issue:`5612`, :issue:`5617`,
  :issue:`5631`).
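
The SSLv3 removal above follows from the pyOpenSSL upgrade: per the note, pyOpenSSL 22.1.0 no longer provides an SSLv3 method, so code can only probe for the constant rather than reference it directly. A rough sketch of that compatibility check (illustrative only, not part of Scrapy):

    from OpenSSL import SSL

    # Builds of pyOpenSSL without SSLv3 simply lack the constant.
    sslv3_method = getattr(SSL, 'SSLv3_METHOD', None)
    if sslv3_method is None:
        print('This pyOpenSSL build has no SSLv3 support; use a TLS method instead.')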


.. _release-2.6.2:

Scrapy 2.6.2 (2022-07-25)
1 change: 0 additions & 1 deletion docs/topics/settings.rst
@@ -560,7 +560,6 @@ This setting must be one of these string values:
set this if you want the behavior of Scrapy<1.1
- ``'TLSv1.1'``: forces TLS version 1.1
- ``'TLSv1.2'``: forces TLS version 1.2
- ``'SSLv3'``: forces SSL version 3 (**not recommended**)
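
With ``'SSLv3'`` gone, the remaining values can still be pinned per project. A minimal sketch, assuming the list above documents the ``DOWNLOADER_CLIENT_TLS_METHOD`` setting and a standard project ``settings.py``:

    # settings.py (project level). 'TLS' keeps protocol negotiation;
    # an explicit value such as 'TLSv1.2' forces that single version.
    DOWNLOADER_CLIENT_TLS_METHOD = "TLSv1.2"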


.. setting:: DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING
2 changes: 1 addition & 1 deletion scrapy/core/downloader/contextfactory.py
@@ -21,7 +21,7 @@ class ScrapyClientContextFactory(BrowserLikePolicyForHTTPS):
which allows TLS protocol negotiation
'A TLS/SSL connection established with [this method] may
understand the SSLv3, TLSv1, TLSv1.1 and TLSv1.2 protocols.'
understand the TLSv1, TLSv1.1 and TLSv1.2 protocols.'
"""

def __init__(self, method=SSL.SSLv23_METHOD, tls_verbose_logging=False, tls_ciphers=None, *args, **kwargs):
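
The ``method`` argument in ``__init__`` above is what the docstring refers to: ``SSLv23_METHOD`` negotiates the best mutually supported TLS version, while other constants pin a single version. A small usage sketch (not taken from the Scrapy code base):

    from OpenSSL import SSL
    from scrapy.core.downloader.contextfactory import ScrapyClientContextFactory

    # Default: negotiate TLSv1/1.1/1.2 as described in the docstring.
    negotiating = ScrapyClientContextFactory()
    # Pin the handshake to TLS 1.2 only, mirroring the 'TLSv1.2' setting value.
    tls12_only = ScrapyClientContextFactory(method=SSL.TLSv1_2_METHOD)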
2 changes: 0 additions & 2 deletions scrapy/core/downloader/tls.py
@@ -11,7 +11,6 @@
logger = logging.getLogger(__name__)


METHOD_SSLv3 = 'SSLv3'
METHOD_TLS = 'TLS'
METHOD_TLSv10 = 'TLSv1.0'
METHOD_TLSv11 = 'TLSv1.1'
@@ -20,7 +19,6 @@

openssl_methods = {
METHOD_TLS: SSL.SSLv23_METHOD, # protocol negotiation (recommended)
METHOD_SSLv3: SSL.SSLv3_METHOD, # SSL 3 (NOT recommended)
METHOD_TLSv10: SSL.TLSv1_METHOD, # TLS 1.0 only
METHOD_TLSv11: getattr(SSL, 'TLSv1_1_METHOD', 5), # TLS 1.1 only
METHOD_TLSv12: getattr(SSL, 'TLSv1_2_METHOD', 6), # TLS 1.2 only
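
This mapping is what turns the documented string values into pyOpenSSL constants; the ``getattr`` fallbacks keep the module importable on pyOpenSSL builds that lack the TLS 1.1/1.2 names (the numeric fallbacks match pyOpenSSL's own values). A quick illustration of the lookup, assuming the dictionary above:

    from scrapy.core.downloader.tls import openssl_methods

    # 'TLS' resolves to SSLv23_METHOD (negotiation, the recommended default);
    # explicit versions resolve to their TLSv1_x_METHOD constants.
    negotiate = openssl_methods['TLS']
    tls12_only = openssl_methods['TLSv1.2']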
10 changes: 5 additions & 5 deletions setup.py
@@ -19,16 +19,16 @@ def has_environment_marker_platform_impl_support():


install_requires = [
'Twisted>=17.9.0',
'cryptography>=2.0',
'Twisted>=18.9.0',
'cryptography>=3.3',
'cssselect>=0.9.1',
'itemloaders>=1.0.1',
'parsel>=1.5.0',
'pyOpenSSL>=16.2.0',
'pyOpenSSL>=21.0.0',
'queuelib>=1.4.2',
'service_identity>=16.0.0',
'service_identity>=18.1.0',
'w3lib>=1.17.0',
'zope.interface>=4.1.3',
'zope.interface>=5.0.0',
'protego>=0.1.15',
'itemadapter>=0.1.0',
'setuptools',
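
The raised floors above can be sanity-checked in a local environment with a few lines; this is only a verification sketch, not part of the package:

    from pkg_resources import get_distribution, parse_version

    # New minimum versions, taken from install_requires above.
    floors = {
        'Twisted': '18.9.0',
        'cryptography': '3.3',
        'pyOpenSSL': '21.0.0',
        'service_identity': '18.1.0',
        'zope.interface': '5.0.0',
    }
    for name, floor in floors.items():
        installed = get_distribution(name).version
        assert parse_version(installed) >= parse_version(floor), (name, installed, floor)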
6 changes: 5 additions & 1 deletion tests/test_crawler.py
@@ -21,6 +21,8 @@
from scrapy.extensions.throttle import AutoThrottle
from scrapy.extensions import telnet
from scrapy.utils.test import get_testenv
from pkg_resources import parse_version
from w3lib import __version__ as w3lib_version

from tests.mockserver import MockServer

@@ -316,7 +318,7 @@ def test_reactor_default(self):

def test_reactor_default_twisted_reactor_select(self):
log = self.run_script('reactor_default_twisted_reactor_select.py')
if platform.system() == 'Windows':
if platform.system() in ['Windows', 'Darwin']:
# The goal of this test function is to test that, when a reactor is
# installed (the default one here) and a different reactor is
# configured (select here), an error is raised.
@@ -369,6 +371,8 @@ def test_asyncio_enabled_reactor(self):
self.assertIn('Spider closed (finished)', log)
self.assertIn("Using reactor: twisted.internet.asyncioreactor.AsyncioSelectorReactor", log)

@mark.skipif(parse_version(w3lib_version) >= parse_version("2.0.0"),
reason='w3lib 2.0.0 and later do not allow invalid domains.')
def test_ipv6_default_name_resolver(self):
log = self.run_script('default_name_resolver.py')
self.assertIn('Spider closed (finished)', log)
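
The new skip marker compares versions with ``parse_version`` rather than raw strings, which avoids lexicographic surprises with multi-digit components; a tiny illustration:

    from pkg_resources import parse_version

    # Plain string comparison gets '10.0' vs '9.0' wrong; parse_version does not.
    assert ('10.0' > '9.0') is False
    assert parse_version('10.0') > parse_version('9.0')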
7 changes: 4 additions & 3 deletions tests/test_spider.py
@@ -22,6 +22,7 @@
from scrapy.linkextractors import LinkExtractor
from scrapy.utils.test import get_crawler
from tests import get_testdata
from w3lib.url import safe_url_string


class SpiderTest(unittest.TestCase):
@@ -361,9 +362,9 @@ def process_request_upper(self, request, response):
self.assertEqual(len(output), 3)
self.assertTrue(all(map(lambda r: isinstance(r, Request), output)))
self.assertEqual([r.url for r in output],
['http://EXAMPLE.ORG/SOMEPAGE/ITEM/12.HTML',
'http://EXAMPLE.ORG/ABOUT.HTML',
'http://EXAMPLE.ORG/NOFOLLOW.HTML'])
[safe_url_string('http://EXAMPLE.ORG/SOMEPAGE/ITEM/12.HTML'),
safe_url_string('http://EXAMPLE.ORG/ABOUT.HTML'),
safe_url_string('http://EXAMPLE.ORG/NOFOLLOW.HTML')])

def test_process_request_instance_method_with_response(self):

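
``safe_url_string`` is used in the expected URLs above so the test tracks whatever normalisation the installed w3lib applies, instead of hard-coding one form. A small check (the exact output depends on the w3lib version; w3lib 2.x normalises the host while leaving the path case intact):

    from w3lib.url import safe_url_string

    # Prints the normalised form; with w3lib >= 2.0 the host is lowercased.
    print(safe_url_string('http://EXAMPLE.ORG/ABOUT.HTML'))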
29 changes: 10 additions & 19 deletions tox.ini
@@ -11,15 +11,10 @@ minversion = 1.7.0
deps =
-rtests/requirements.txt
# mitmproxy does not support PyPy
# mitmproxy does not support Windows when running Python < 3.7
# Python 3.9+ requires mitmproxy >= 5.3.0
# mitmproxy >= 5.3.0 requires h2 >= 4.0, Twisted 21.2 requires h2 < 4.0
#mitmproxy >= 5.3.0; python_version >= '3.9' and implementation_name != 'pypy'
# mitmproxy < 7.0.0 is not compatible with pyOpenSSL >= 22.1.0, so no mitmproxy for Python ≤ 3.7
# The tests hang with mitmproxy 8.0.0: https://github.com/scrapy/scrapy/issues/5454
mitmproxy >= 4.0.4, < 8; python_version >= '3.7' and python_version < '3.9' and implementation_name != 'pypy'
mitmproxy >= 4.0.4, < 5; python_version >= '3.6' and python_version < '3.7' and platform_system != 'Windows' and implementation_name != 'pypy'
# newer markupsafe is incompatible with deps of old mitmproxy (which we get on Python 3.7 and lower)
markupsafe < 2.1.0; python_version >= '3.6' and python_version < '3.8' and implementation_name != 'pypy'
mitmproxy >= 7.0.0, < 8; python_version >= '3.8' and python_version < '3.9' and implementation_name != 'pypy'
# Extras
botocore>=1.4.87
passenv =
@@ -58,8 +53,6 @@ commands =
basepython = python3
deps =
{[testenv]deps}
# Twisted[http2] is required to import some files
Twisted[http2]>=17.9.0
pytest-flake8
flake8==3.9.2 # https://github.com/tholo/pytest-flake8/issues/81
commands =
@@ -75,18 +68,18 @@ commands =

[pinned]
deps =
cryptography==2.0
cryptography==3.3
cssselect==0.9.1
h2==3.0
itemadapter==0.1.0
parsel==1.5.0
Protego==0.1.15
pyOpenSSL==16.2.0
pyOpenSSL==21.0.0
queuelib==1.4.2
service_identity==16.0.0
Twisted[http2]==17.9.0
service_identity==18.1.0
Twisted[http2]==18.9.0
w3lib==1.17.0
zope.interface==4.1.3
zope.interface==5.0.0
-rtests/requirements.txt

# mitmproxy 4.0.4+ requires upgrading some of the pinned dependencies
@@ -102,6 +95,7 @@ install_command =
pip install -U {opts} {packages}

[testenv:pinned]
basepython = python3.6
deps =
{[pinned]deps}
lxml==3.5.0
@@ -111,7 +105,7 @@ setenv =
{[pinned]setenv}

[testenv:windows-pinned]
basepython = python3
basepython = python3.6
deps =
{[pinned]deps}
# First lxml version that includes a Windows wheel for Python 3.6, so we do
@@ -129,16 +123,13 @@ deps =
reppy
robotexclusionrulesparser
Pillow>=4.0.0
Twisted[http2]>=17.9.0
# Twisted[http2] currently forces old mitmproxy because of h2 version restrictions in their deps,
# so we need to pin old markupsafe here too
markupsafe < 2.1.0

[testenv:asyncio]
commands =
{[testenv]commands} --reactor=asyncio

[testenv:asyncio-pinned]
basepython = python3.6
deps = {[testenv:pinned]deps}
commands = {[testenv:asyncio]commands}
install_command = {[pinned]install_command}
