From 82d239f3b148d9ce69f67bd7a2cb00de7e934aa6 Mon Sep 17 00:00:00 2001 From: Anubhav Patel Date: Wed, 6 Mar 2019 12:08:09 +0530 Subject: [PATCH 01/19] docs for scrapy.logformatter --- docs/topics/logging.rst | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/docs/topics/logging.rst b/docs/topics/logging.rst index 0986929addb..a5fecebba6d 100644 --- a/docs/topics/logging.rst +++ b/docs/topics/logging.rst @@ -193,6 +193,45 @@ to override some of the Scrapy settings regarding logging. Module `logging.handlers `_ Further documentation on available handlers +Custom Log Formats +------------------- + +Custom log format can be set for different actions by extending ``scrapy.logformatter.LogFormatter`` class. + +Each method of ``scrapy.logformatter.LogFormatter`` represents an action. All methods inherited from +``scrapy.logformatter.LogFormatter`` in your custom log formatting class must return a dictionary listing +the parameters ``level``, ``msg`` and ``args`` which are going to be used for constructing the log message. +Listed below is details of what each key represents : + +* ``level`` is the log level for that action, you can use those from the python logging library: + :setting:`logging.DEBUG`, :setting:`logging.INFO`, :setting:`logging.WARNING`, :setting:`logging.ERROR` + and :setting:`logging.CRITICAL`. + +* ``msg`` should be a string that can contain different formatting placeholders. This string, formatted + with the provided ``args``, is going to be the long message for that action. + +* ``args`` should be a tuple or dict with the formatting placeholders for `msg`. The final log message is + computed as ``msg % args``. + +.. note:: To use custom log formatting class, you must mention it in ``settings.py``, by adding a line + ``LOG_FORMATTER = '’`` + +.. class:: scrapy.logformatter.LogFormatter + + The default log formatting class in Scrapy. + + .. method:: crawled (request, response, spider) + + ``crawled`` is called to log message when the crawler finds a webpage. + + .. method:: scraped(item, response, spider) + + ``scraped`` is called to log message when an item scraped by a spider. + + .. method:: dropped(item, exception, response, spider) + + ``dropped`` is called to log message when an item is dropped while it is passing through the item pipeline. + Advanced customization ---------------------- From 924b67437b92f14601816d02c5d153e7281da6d4 Mon Sep 17 00:00:00 2001 From: Anubhav Patel Date: Thu, 7 Mar 2019 16:40:59 +0530 Subject: [PATCH 02/19] move api docs to source code --- docs/topics/logging.rst | 38 ++++---------------------------------- docs/topics/settings.rst | 9 +++++++++ scrapy/logformatter.py | 36 +++++++++++++++++++++++------------- 3 files changed, 36 insertions(+), 47 deletions(-) diff --git a/docs/topics/logging.rst b/docs/topics/logging.rst index a5fecebba6d..72f24bae61b 100644 --- a/docs/topics/logging.rst +++ b/docs/topics/logging.rst @@ -196,41 +196,11 @@ to override some of the Scrapy settings regarding logging. Custom Log Formats ------------------- -Custom log format can be set for different actions by extending ``scrapy.logformatter.LogFormatter`` class. - -Each method of ``scrapy.logformatter.LogFormatter`` represents an action. All methods inherited from -``scrapy.logformatter.LogFormatter`` in your custom log formatting class must return a dictionary listing -the parameters ``level``, ``msg`` and ``args`` which are going to be used for constructing the log message. 
-Listed below is details of what each key represents : - -* ``level`` is the log level for that action, you can use those from the python logging library: - :setting:`logging.DEBUG`, :setting:`logging.INFO`, :setting:`logging.WARNING`, :setting:`logging.ERROR` - and :setting:`logging.CRITICAL`. - -* ``msg`` should be a string that can contain different formatting placeholders. This string, formatted - with the provided ``args``, is going to be the long message for that action. - -* ``args`` should be a tuple or dict with the formatting placeholders for `msg`. The final log message is - computed as ``msg % args``. - -.. note:: To use custom log formatting class, you must mention it in ``settings.py``, by adding a line - ``LOG_FORMATTER = '’`` +Custom log format can be set for different actions by extending :class:`~scrapy.logformatter.LogFormatter` class +and making :setting:`LOG_FORMATTER` inside ``settings.py`` point to your new class. -.. class:: scrapy.logformatter.LogFormatter - - The default log formatting class in Scrapy. - - .. method:: crawled (request, response, spider) - - ``crawled`` is called to log message when the crawler finds a webpage. - - .. method:: scraped(item, response, spider) - - ``scraped`` is called to log message when an item scraped by a spider. - - .. method:: dropped(item, exception, response, spider) - - ``dropped`` is called to log message when an item is dropped while it is passing through the item pipeline. +.. autoclass:: scrapy.logformatter.LogFormatter + :members: Advanced customization ---------------------- diff --git a/docs/topics/settings.rst b/docs/topics/settings.rst index 0ac26a9bd64..1dfb5b8aa1f 100644 --- a/docs/topics/settings.rst +++ b/docs/topics/settings.rst @@ -866,6 +866,15 @@ directives. .. _Python datetime documentation: https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior +.. setting:: LOG_FORMATTER + +LOG_FORMATTER +------------- + +Default: ``scrapy.logformatter.LogFormatter`` + +The class to use for formatting log messages for different actions. + .. setting:: LOG_LEVEL LOG_LEVEL diff --git a/scrapy/logformatter.py b/scrapy/logformatter.py index 075a6d862d6..0bb8aee5855 100644 --- a/scrapy/logformatter.py +++ b/scrapy/logformatter.py @@ -13,25 +13,29 @@ class LogFormatter(object): """Class for generating log messages for different actions. - All methods must return a dictionary listing the parameters `level`, `msg` - and `args` which are going to be used for constructing the log message when - calling logging.log. + All methods must return a dictionary listing the parameters ``level``, ``msg`` + and ``args`` which are going to be used for constructing the log message when + calling ``logging.log``. Dictionary keys for the method outputs: - * `level` should be the log level for that action, you can use those - from the python logging library: logging.DEBUG, logging.INFO, - logging.WARNING, logging.ERROR and logging.CRITICAL. - * `msg` should be a string that can contain different formatting - placeholders. This string, formatted with the provided `args`, is going - to be the log message for that action. + * ``level`` is the log level for that action, you can use those from the + `python logging library `_ : + ``logging.DEBUG``, ``logging.INFO``, ``logging.WARNING``, ``logging.ERROR`` + and ``logging.CRITICAL``. + + * ``msg`` should be a string that can contain different formatting placeholders. This string, formatted + with the provided ``args``, is going to be the long message for that action. 
+ + * ``args`` should be a tuple or dict with the formatting placeholders for ``msg``. The final log message is + computed as ``msg % args``. - * `args` should be a tuple or dict with the formatting placeholders for - `msg`. The final log message is computed as output['msg'] % - output['args']. """ def crawled(self, request, response, spider): + """ + ``crawled`` is called to log message when the crawler finds a webpage. + """ request_flags = ' %s' % str(request.flags) if request.flags else '' response_flags = ' %s' % str(response.flags) if response.flags else '' return { @@ -40,7 +44,7 @@ def crawled(self, request, response, spider): 'args': { 'status': response.status, 'request': request, - 'request_flags' : request_flags, + 'request_flags': request_flags, 'referer': referer_str(request), 'response_flags': response_flags, # backward compatibility with Scrapy logformatter below 1.4 version @@ -49,6 +53,9 @@ def crawled(self, request, response, spider): } def scraped(self, item, response, spider): + """ + ``scraped`` is called to log message when an item is scraped by a spider. + """ if isinstance(response, Failure): src = response.getErrorMessage() else: @@ -63,6 +70,9 @@ def scraped(self, item, response, spider): } def dropped(self, item, exception, response, spider): + """ + ``dropped`` is called to log message when an item is dropped while it is passing through the item pipeline. + """ return { 'level': logging.WARNING, 'msg': DROPPEDMSG, From 82049e9c41f878d84f0fe10f827c6fe2a33f7ba6 Mon Sep 17 00:00:00 2001 From: Anubhav Patel Date: Sun, 10 Mar 2019 20:14:55 +0530 Subject: [PATCH 03/19] make suggested changes. --- docs/topics/logging.rst | 8 +++++--- docs/topics/settings.rst | 4 ++-- scrapy/logformatter.py | 26 +++++++++++++++++--------- 3 files changed, 24 insertions(+), 14 deletions(-) diff --git a/docs/topics/logging.rst b/docs/topics/logging.rst index 72f24bae61b..006530a8c68 100644 --- a/docs/topics/logging.rst +++ b/docs/topics/logging.rst @@ -193,11 +193,13 @@ to override some of the Scrapy settings regarding logging. Module `logging.handlers `_ Further documentation on available handlers +.. _custom-log-formats: + Custom Log Formats -------------------- +------------------ -Custom log format can be set for different actions by extending :class:`~scrapy.logformatter.LogFormatter` class -and making :setting:`LOG_FORMATTER` inside ``settings.py`` point to your new class. +A custom log format can be set for different actions by extending :class:`~scrapy.logformatter.LogFormatter` class +and making :setting:`LOG_FORMATTER` point to your new class. .. autoclass:: scrapy.logformatter.LogFormatter :members: diff --git a/docs/topics/settings.rst b/docs/topics/settings.rst index 1dfb5b8aa1f..a36c0b34c67 100644 --- a/docs/topics/settings.rst +++ b/docs/topics/settings.rst @@ -871,9 +871,9 @@ directives. LOG_FORMATTER ------------- -Default: ``scrapy.logformatter.LogFormatter`` +Default: :class:`scrapy.logformatter.LogFormatter` -The class to use for formatting log messages for different actions. +The class to use for :ref:`formatting log messages ` for different actions. .. setting:: LOG_LEVEL diff --git a/scrapy/logformatter.py b/scrapy/logformatter.py index 0bb8aee5855..17c69cba875 100644 --- a/scrapy/logformatter.py +++ b/scrapy/logformatter.py @@ -30,12 +30,24 @@ class LogFormatter(object): * ``args`` should be a tuple or dict with the formatting placeholders for ``msg``. The final log message is computed as ``msg % args``. 
+ Here is an example on how to create a custom log formatter to lower the severity level of the log message + when an item is dropped from the pipeline:: + + class PoliteLogFormatter(logformatter.LogFormatter): + def dropped(self, item, exception, response, spider): + return { + 'level': logging.INFO, # lowering the level from logging.WARNING + 'msg': u"Dropped: %(exception)s" + os.linesep + "%(item)s", + 'args': { + 'exception': exception, + 'item': item, + } + } + """ def crawled(self, request, response, spider): - """ - ``crawled`` is called to log message when the crawler finds a webpage. - """ + """Logs a message when the crawler finds a webpage.""" request_flags = ' %s' % str(request.flags) if request.flags else '' response_flags = ' %s' % str(response.flags) if response.flags else '' return { @@ -53,9 +65,7 @@ def crawled(self, request, response, spider): } def scraped(self, item, response, spider): - """ - ``scraped`` is called to log message when an item is scraped by a spider. - """ + """Logs a message when an item is scraped by a spider.""" if isinstance(response, Failure): src = response.getErrorMessage() else: @@ -70,9 +80,7 @@ def scraped(self, item, response, spider): } def dropped(self, item, exception, response, spider): - """ - ``dropped`` is called to log message when an item is dropped while it is passing through the item pipeline. - """ + """Logs a message when an item is dropped while it is passing through the item pipeline.""" return { 'level': logging.WARNING, 'msg': DROPPEDMSG, From e9cd4ee03aa41e27bea0408b10970ec5bedf35d3 Mon Sep 17 00:00:00 2001 From: Anubhav Patel Date: Sun, 10 Mar 2019 20:37:56 +0530 Subject: [PATCH 04/19] fix list alignment and line width --- scrapy/logformatter.py | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/scrapy/logformatter.py b/scrapy/logformatter.py index 17c69cba875..7171202425a 100644 --- a/scrapy/logformatter.py +++ b/scrapy/logformatter.py @@ -19,19 +19,18 @@ class LogFormatter(object): Dictionary keys for the method outputs: - * ``level`` is the log level for that action, you can use those from the - `python logging library `_ : - ``logging.DEBUG``, ``logging.INFO``, ``logging.WARNING``, ``logging.ERROR`` - and ``logging.CRITICAL``. - - * ``msg`` should be a string that can contain different formatting placeholders. This string, formatted - with the provided ``args``, is going to be the long message for that action. - - * ``args`` should be a tuple or dict with the formatting placeholders for ``msg``. The final log message is - computed as ``msg % args``. - - Here is an example on how to create a custom log formatter to lower the severity level of the log message - when an item is dropped from the pipeline:: + * ``level`` is the log level for that action, you can use those from the + `python logging library `_ : + ``logging.DEBUG``, ``logging.INFO``, ``logging.WARNING``, ``logging.ERROR`` + and ``logging.CRITICAL``. + * ``msg`` should be a string that can contain different formatting placeholders. + This string, formatted with the provided ``args``, is going to be the long message + for that action. + * ``args`` should be a tuple or dict with the formatting placeholders for ``msg``. + The final log message is computed as ``msg % args``. 
+ + Here is an example on how to create a custom log formatter to lower the severity level of + the log message when an item is dropped from the pipeline:: class PoliteLogFormatter(logformatter.LogFormatter): def dropped(self, item, exception, response, spider): From 69b1d5d3d7050b5c60a95cc547febaded1b3685f Mon Sep 17 00:00:00 2001 From: Andrey Rakhmatullin Date: Fri, 5 Oct 2018 18:21:26 +0500 Subject: [PATCH 05/19] Log cipher, certificate and temp key info on establishing an SSL connection. --- scrapy/core/downloader/tls.py | 18 ++++++++++++- scrapy/utils/ssl.py | 50 +++++++++++++++++++++++++++++++++++ 2 files changed, 67 insertions(+), 1 deletion(-) create mode 100644 scrapy/utils/ssl.py diff --git a/scrapy/core/downloader/tls.py b/scrapy/core/downloader/tls.py index df805118249..2ba72593fa5 100644 --- a/scrapy/core/downloader/tls.py +++ b/scrapy/core/downloader/tls.py @@ -2,6 +2,7 @@ from OpenSSL import SSL from scrapy import twisted_version +from scrapy.utils.ssl import x509name_to_string, get_temp_key_info logger = logging.getLogger(__name__) @@ -20,6 +21,7 @@ METHOD_TLSv12: getattr(SSL, 'TLSv1_2_METHOD', 6), # TLS 1.2 only } + if twisted_version >= (14, 0, 0): # ClientTLSOptions requires a recent-enough version of Twisted. # Not having ScrapyClientTLSOptions should not matter for older @@ -65,13 +67,27 @@ class ScrapyClientTLSOptions(ClientTLSOptions): Same as Twisted's private _sslverify.ClientTLSOptions, except that VerificationError, CertificateError and ValueError exceptions are caught, so that the connection is not closed, only - logging warnings. + logging warnings. Also, HTTPS connection parameters logging is added. """ def _identityVerifyingInfoCallback(self, connection, where, ret): if where & SSL_CB_HANDSHAKE_START: set_tlsext_host_name(connection, self._hostnameBytes) elif where & SSL_CB_HANDSHAKE_DONE: + logger.debug('SSL connection to %s using protocol %s, cipher %s', + self._hostnameASCII, + connection.get_protocol_version_name(), + connection.get_cipher_name(), + ) + server_cert = connection.get_peer_certificate() + logger.debug('SSL connection certificate: issuer "%s", subject "%s"', + x509name_to_string(server_cert.get_issuer()), + x509name_to_string(server_cert.get_subject()), + ) + key_info = get_temp_key_info(connection._ssl) + if key_info: + logger.debug('SSL temp key: %s', key_info) + try: verifyHostname(connection, self._hostnameASCII) except verification_errors as e: diff --git a/scrapy/utils/ssl.py b/scrapy/utils/ssl.py new file mode 100644 index 00000000000..5db1608bf48 --- /dev/null +++ b/scrapy/utils/ssl.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- + +import OpenSSL._util as pyOpenSSLutil + +from scrapy.utils.python import to_native_str + + +def ffi_buf_to_string(buf): + return to_native_str(pyOpenSSLutil.ffi.string(buf)) + + +def x509name_to_string(x509name): + # from OpenSSL.crypto.X509Name.__repr__ + result_buffer = pyOpenSSLutil.ffi.new("char[]", 512) + pyOpenSSLutil.lib.X509_NAME_oneline(x509name._name, result_buffer, len(result_buffer)) + + return ffi_buf_to_string(result_buffer) + + +def get_temp_key_info(ssl_object): + if not hasattr(pyOpenSSLutil.lib, 'SSL_get_server_tmp_key'): # requires OpenSSL 1.0.2 + return None + + # adapted from OpenSSL apps/s_cb.c::ssl_print_tmp_key() + temp_key_p = pyOpenSSLutil.ffi.new("EVP_PKEY **") + pyOpenSSLutil.lib.SSL_get_server_tmp_key(ssl_object, temp_key_p) + if temp_key_p == pyOpenSSLutil.ffi.NULL: + return None + + temp_key = temp_key_p[0] + pyOpenSSLutil.ffi.gc(temp_key, pyOpenSSLutil.lib.EVP_PKEY_free) + 
+    key_info = []
+    key_type = pyOpenSSLutil.lib.EVP_PKEY_id(temp_key)
+    if key_type == pyOpenSSLutil.lib.EVP_PKEY_RSA:
+        key_info.append('RSA')
+    elif key_type == pyOpenSSLutil.lib.EVP_PKEY_DH:
+        key_info.append('DH')
+    elif key_type == pyOpenSSLutil.lib.EVP_PKEY_EC:
+        key_info.append('ECDH')
+        ec_key = pyOpenSSLutil.lib.EVP_PKEY_get1_EC_KEY(temp_key)
+        pyOpenSSLutil.ffi.gc(ec_key, pyOpenSSLutil.lib.EC_KEY_free)
+        nid = pyOpenSSLutil.lib.EC_GROUP_get_curve_name(pyOpenSSLutil.lib.EC_KEY_get0_group(ec_key))
+        cname = pyOpenSSLutil.lib.EC_curve_nid2nist(nid)
+        if cname == pyOpenSSLutil.ffi.NULL:
+            cname = pyOpenSSLutil.lib.OBJ_nid2sn(nid)
+        key_info.append(ffi_buf_to_string(cname))
+    else:
+        key_info.append(ffi_buf_to_string(pyOpenSSLutil.lib.OBJ_nid2sn(key_type)))
+    key_info.append('%s bits' % pyOpenSSLutil.lib.EVP_PKEY_bits(temp_key))
+    return ', '.join(key_info)

From 67a400092805ec0d643bd7de0481cc45d5ce8471 Mon Sep 17 00:00:00 2001
From: Andrey Rakhmatullin
Date: Mon, 8 Jul 2019 10:31:52 +0500
Subject: [PATCH 06/19] Work around older pyOpenSSL not having
 get_cipher_name or get_protocol_version_name.

---
 scrapy/core/downloader/tls.py | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/scrapy/core/downloader/tls.py b/scrapy/core/downloader/tls.py
index 2ba72593fa5..7e5882663a4 100644
--- a/scrapy/core/downloader/tls.py
+++ b/scrapy/core/downloader/tls.py
@@ -74,11 +74,18 @@ def _identityVerifyingInfoCallback(self, connection, where, ret):
         if where & SSL_CB_HANDSHAKE_START:
             set_tlsext_host_name(connection, self._hostnameBytes)
         elif where & SSL_CB_HANDSHAKE_DONE:
-            logger.debug('SSL connection to %s using protocol %s, cipher %s',
-                         self._hostnameASCII,
-                         connection.get_protocol_version_name(),
-                         connection.get_cipher_name(),
-                         )
+            if hasattr(connection, 'get_cipher_name'):  # requires pyOPenSSL 0.15
+                if hasattr(connection, 'get_protocol_version_name'):  # requires pyOPenSSL 16.0.0
+                    logger.debug('SSL connection to %s using protocol %s, cipher %s',
+                                 self._hostnameASCII,
+                                 connection.get_protocol_version_name(),
+                                 connection.get_cipher_name(),
+                                 )
+                else:
+                    logger.debug('SSL connection to %s using cipher %s',
+                                 self._hostnameASCII,
+                                 connection.get_cipher_name(),
+                                 )
             server_cert = connection.get_peer_certificate()
             logger.debug('SSL connection certificate: issuer "%s", subject "%s"',
                          x509name_to_string(server_cert.get_issuer()),

From 0b9dce3a6c17d8dc827df57367383e0b82fa8b07 Mon Sep 17 00:00:00 2001
From: Andrey Rakhmatullin
Date: Mon, 8 Jul 2019 17:40:56 +0500
Subject: [PATCH 07/19] Add DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING setting.

---
 docs/topics/settings.rst                  | 21 +++++++++--
 scrapy/core/downloader/contextfactory.py  | 14 ++++++-
 scrapy/core/downloader/handlers/http10.py |  7 ++--
 scrapy/core/downloader/handlers/http11.py | 11 +++---
 scrapy/core/downloader/tls.py             | 45 +++++++++++++----------
 scrapy/settings/default_settings.py       |  1 +
 6 files changed, 66 insertions(+), 33 deletions(-)

diff --git a/docs/topics/settings.rst b/docs/topics/settings.rst
index 371f21c72f5..5cc87bb64ed 100644
--- a/docs/topics/settings.rst
+++ b/docs/topics/settings.rst
@@ -438,9 +438,10 @@ or even enable client-side authentication (and various other things).
    which uses the platform's certificates to validate remote endpoints.
    **This is only available if you use Twisted>=14.0.**
 
-If you do use a custom ContextFactory, make sure it accepts a ``method``
-parameter at init (this is the ``OpenSSL.SSL`` method mapping
-:setting:`DOWNLOADER_CLIENT_TLS_METHOD`).
+If you do use a custom ContextFactory, make sure its ``__init__` method accepts
+a ``method`` parameter (this is the ``OpenSSL.SSL`` method mapping
+:setting:`DOWNLOADER_CLIENT_TLS_METHOD`) and a ``settings`` parameter (this is
+the Scrapy settings object).
 
 .. setting:: DOWNLOADER_CLIENT_TLS_METHOD
 
@@ -468,6 +469,20 @@ This setting must be one of these string values:
 We recommend that you use PyOpenSSL>=0.13 and Twisted>=0.13 or above
 (Twisted>=14.0 if you can).
 
+.. setting:: DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING
+
+DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING
+-------------------------------------
+
+Default: ``False``
+
+Setting this to ``True`` will enable DEBUG level messages about TLS connection
+parameters after establishing HTTPS connections. The kind of information logged
+depends on the versions of OpenSSL and pyOpenSSL.
+
+This setting is only used for the default
+:setting:`DOWNLOADER_CLIENTCONTEXTFACTORY`.
+
 .. setting:: DOWNLOADER_MIDDLEWARES
 
 DOWNLOADER_MIDDLEWARES
diff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py
index 783d4c38341..80c784f5af9 100644
--- a/scrapy/core/downloader/contextfactory.py
+++ b/scrapy/core/downloader/contextfactory.py
@@ -2,6 +2,7 @@
 from twisted.internet.ssl import ClientContextFactory
 
 from scrapy import twisted_version
+from scrapy.utils.misc import create_instance
 
 
 if twisted_version >= (14, 0, 0):
@@ -28,9 +29,17 @@ class ScrapyClientContextFactory(BrowserLikePolicyForHTTPS):
     understand the SSLv3, TLSv1, TLSv1.1 and TLSv1.2 protocols.'
     """
 
-    def __init__(self, method=SSL.SSLv23_METHOD, *args, **kwargs):
+    def __init__(self, method=SSL.SSLv23_METHOD, settings=None, *args, **kwargs):
         super(ScrapyClientContextFactory, self).__init__(*args, **kwargs)
         self._ssl_method = method
+        if settings:
+            self.tls_verbose_logging = settings['DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING']
+        else:
+            self.tls_verbose_logging = False
+
+    @classmethod
+    def from_settings(cls, settings, method=SSL.SSLv23_METHOD, *args, **kwargs):
+        return cls(method=method, settings=settings, *args, **kwargs)
 
     def getCertificateOptions(self):
         # setting verify=True will require you to provide CAs
@@ -56,7 +65,8 @@ def getContext(self, hostname=None, port=None):
         return self.getCertificateOptions().getContext()
 
     def creatorForNetloc(self, hostname, port):
-        return ScrapyClientTLSOptions(hostname.decode("ascii"), self.getContext())
+        return ScrapyClientTLSOptions(hostname.decode("ascii"), self.getContext(),
+                                      verbose_logging=self.tls_verbose_logging)
 
 
 @implementer(IPolicyForHTTPS)
diff --git a/scrapy/core/downloader/handlers/http10.py b/scrapy/core/downloader/handlers/http10.py
index d875fb1e441..be729853111 100644
--- a/scrapy/core/downloader/handlers/http10.py
+++ b/scrapy/core/downloader/handlers/http10.py
@@ -1,7 +1,7 @@
 """Download handlers for http and https schemes
 """
 from twisted.internet import reactor
-from scrapy.utils.misc import load_object
+from scrapy.utils.misc import load_object, create_instance
 from scrapy.utils.python import to_unicode
 
 
@@ -11,6 +11,7 @@ class HTTP10DownloadHandler(object):
     def __init__(self, settings):
         self.HTTPClientFactory = load_object(settings['DOWNLOADER_HTTPCLIENTFACTORY'])
         self.ClientContextFactory = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
+        self._settings = settings
 
     def download_request(self, request, spider):
         """Return a deferred for the HTTP download"""
@@ -21,7 +22,7 @@ def _connect(self, factory):
         host, port = to_unicode(factory.host), factory.port
         if factory.scheme == b'https':
-            return reactor.connectSSL(host, port, factory,
-                                      self.ClientContextFactory())
+            client_context_factory = create_instance(self.ClientContextFactory, settings=self._settings, crawler=None)
+            return reactor.connectSSL(host, port, factory, client_context_factory)
         else:
             return reactor.connectTCP(host, port, factory)
diff --git a/scrapy/core/downloader/handlers/http11.py b/scrapy/core/downloader/handlers/http11.py
index 0673188a165..9b0c7977d0a 100644
--- a/scrapy/core/downloader/handlers/http11.py
+++ b/scrapy/core/downloader/handlers/http11.py
@@ -25,7 +25,7 @@
 from scrapy.responsetypes import responsetypes
 from scrapy.core.downloader.webclient import _parse
 from scrapy.core.downloader.tls import openssl_methods
-from scrapy.utils.misc import load_object
+from scrapy.utils.misc import load_object, create_instance
 from scrapy.utils.python import to_bytes, to_unicode
 from scrapy import twisted_version
 
@@ -44,14 +44,15 @@ def __init__(self, settings):
         self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
         # try method-aware context factory
         try:
-            self._contextFactory = self._contextFactoryClass(method=self._sslMethod)
+            self._contextFactory = create_instance(self._contextFactoryClass, settings=settings, crawler=None,
+                                                   method=self._sslMethod)
         except TypeError:
             # use context factory defaults
-            self._contextFactory = self._contextFactoryClass()
+            self._contextFactory = create_instance(self._contextFactoryClass, settings=settings, crawler=None)
             msg = """
 '%s' does not accept `method` argument (type OpenSSL.SSL method,\
- e.g. OpenSSL.SSL.SSLv23_METHOD).\
- Please upgrade your context factory class to handle it or ignore it.""" % (
+ e.g. OpenSSL.SSL.SSLv23_METHOD) and/or `settings` argument.\
+ Please upgrade your context factory class to handle them or ignore them.""" % (
                 settings['DOWNLOADER_CLIENTCONTEXTFACTORY'],)
             warnings.warn(msg)
         self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
diff --git a/scrapy/core/downloader/tls.py b/scrapy/core/downloader/tls.py
index 7e5882663a4..74be85d52dd 100644
--- a/scrapy/core/downloader/tls.py
+++ b/scrapy/core/downloader/tls.py
@@ -70,30 +70,35 @@ class ScrapyClientTLSOptions(ClientTLSOptions):
     logging warnings. Also, HTTPS connection parameters logging is added.
""" + def __init__(self, hostname, ctx, verbose_logging=False): + super().__init__(hostname, ctx) + self.verbose_logging = verbose_logging + def _identityVerifyingInfoCallback(self, connection, where, ret): if where & SSL_CB_HANDSHAKE_START: set_tlsext_host_name(connection, self._hostnameBytes) elif where & SSL_CB_HANDSHAKE_DONE: - if hasattr(connection, 'get_cipher_name'): # requires pyOPenSSL 0.15 - if hasattr(connection, 'get_protocol_version_name'): # requires pyOPenSSL 16.0.0 - logger.debug('SSL connection to %s using protocol %s, cipher %s', - self._hostnameASCII, - connection.get_protocol_version_name(), - connection.get_cipher_name(), - ) - else: - logger.debug('SSL connection to %s using cipher %s', - self._hostnameASCII, - connection.get_cipher_name(), - ) - server_cert = connection.get_peer_certificate() - logger.debug('SSL connection certificate: issuer "%s", subject "%s"', - x509name_to_string(server_cert.get_issuer()), - x509name_to_string(server_cert.get_subject()), - ) - key_info = get_temp_key_info(connection._ssl) - if key_info: - logger.debug('SSL temp key: %s', key_info) + if self.verbose_logging: + if hasattr(connection, 'get_cipher_name'): # requires pyOPenSSL 0.15 + if hasattr(connection, 'get_protocol_version_name'): # requires pyOPenSSL 16.0.0 + logger.debug('SSL connection to %s using protocol %s, cipher %s', + self._hostnameASCII, + connection.get_protocol_version_name(), + connection.get_cipher_name(), + ) + else: + logger.debug('SSL connection to %s using cipher %s', + self._hostnameASCII, + connection.get_cipher_name(), + ) + server_cert = connection.get_peer_certificate() + logger.debug('SSL connection certificate: issuer "%s", subject "%s"', + x509name_to_string(server_cert.get_issuer()), + x509name_to_string(server_cert.get_subject()), + ) + key_info = get_temp_key_info(connection._ssl) + if key_info: + logger.debug('SSL temp key: %s', key_info) try: verifyHostname(connection, self._hostnameASCII) diff --git a/scrapy/settings/default_settings.py b/scrapy/settings/default_settings.py index 10b6cf9bc29..af8305b250e 100644 --- a/scrapy/settings/default_settings.py +++ b/scrapy/settings/default_settings.py @@ -87,6 +87,7 @@ DOWNLOADER_CLIENTCONTEXTFACTORY = 'scrapy.core.downloader.contextfactory.ScrapyClientContextFactory' DOWNLOADER_CLIENT_TLS_METHOD = 'TLS' # Use highest TLS/SSL protocol version supported by the platform, # also allowing negotiation +DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING = False DOWNLOADER_MIDDLEWARES = {} From 0de6ffc8e1354ac7ffcdf7a75e3ec8d26ed23a01 Mon Sep 17 00:00:00 2001 From: Andrey Rakhmatullin Date: Thu, 11 Jul 2019 13:12:56 +0500 Subject: [PATCH 08/19] Fix super() call. --- scrapy/core/downloader/tls.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scrapy/core/downloader/tls.py b/scrapy/core/downloader/tls.py index 74be85d52dd..74afb3f102c 100644 --- a/scrapy/core/downloader/tls.py +++ b/scrapy/core/downloader/tls.py @@ -71,7 +71,7 @@ class ScrapyClientTLSOptions(ClientTLSOptions): """ def __init__(self, hostname, ctx, verbose_logging=False): - super().__init__(hostname, ctx) + super(ScrapyClientTLSOptions, self).__init__(hostname, ctx) self.verbose_logging = verbose_logging def _identityVerifyingInfoCallback(self, connection, where, ret): From 98689b27a8839a21aed52ada44143456ead84e24 Mon Sep 17 00:00:00 2001 From: Andrey Rakhmatullin Date: Thu, 11 Jul 2019 14:02:35 +0500 Subject: [PATCH 09/19] Improve the DOWNLOADER_CLIENTCONTEXTFACTORY doc. 
--- docs/topics/settings.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/topics/settings.rst b/docs/topics/settings.rst index 5cc87bb64ed..53c62467967 100644 --- a/docs/topics/settings.rst +++ b/docs/topics/settings.rst @@ -438,10 +438,10 @@ or even enable client-side authentication (and various other things). which uses the platform's certificates to validate remote endpoints. **This is only available if you use Twisted>=14.0.** -If you do use a custom ContextFactory, make sure its ``__init__` method accepts -a ``method`` parameter (this is the ``OpenSSL.SSL`` method mapping +If you do use a custom ContextFactory, make sure its ``__init__`` method +accepts a ``method`` parameter (this is the ``OpenSSL.SSL`` method mapping :setting:`DOWNLOADER_CLIENT_TLS_METHOD`) and a ``settings`` parameter (this is -the Scrapy settings object). +the Scrapy :class:`~scrapy.settings.Settings` object). .. setting:: DOWNLOADER_CLIENT_TLS_METHOD From a96a07bc762287a1f2056d1e142aff9f33e206fe Mon Sep 17 00:00:00 2001 From: Andrey Rakhmatullin Date: Fri, 12 Jul 2019 18:44:45 +0500 Subject: [PATCH 10/19] Add a test for DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING. --- tests/test_downloader_handlers.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/tests/test_downloader_handlers.py b/tests/test_downloader_handlers.py index 81235a16fd5..8d0df6b5b1b 100644 --- a/tests/test_downloader_handlers.py +++ b/tests/test_downloader_handlers.py @@ -8,6 +8,7 @@ except ImportError: import mock +from testfixtures import LogCapture from twisted.trial import unittest from twisted.protocols.policies import WrappingFactory from twisted.python.filepath import FilePath @@ -503,6 +504,24 @@ def test_download_broken_chunked_content_allow_data_loss_via_setting(self): class Https11TestCase(Http11TestCase): scheme = 'https' + tls_log_message = 'SSL connection certificate: issuer "/C=IE/O=Scrapy/CN=localhost", subject "/C=IE/O=Scrapy/CN=localhost"' + + @defer.inlineCallbacks + def test_tls_logging(self): + download_handler = self.download_handler_cls(Settings({ + 'DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING': True, + })) + try: + with LogCapture() as log_capture: + request = Request(self.getURL('file')) + d = download_handler.download_request(request, Spider('foo')) + d.addCallback(lambda r: r.body) + d.addCallback(self.assertEqual, b"0123456789") + yield d + log_capture.check_present(('scrapy.core.downloader.tls', 'DEBUG', self.tls_log_message)) + finally: + yield download_handler.close() + class Https11WrongHostnameTestCase(Http11TestCase): scheme = 'https' @@ -523,6 +542,7 @@ def setUp(self): super(Https11InvalidDNSId, self).setUp() self.host = '127.0.0.1' + class Https11InvalidDNSPattern(Https11TestCase): """Connect to HTTPS hosts where the certificate are issued to an ip instead of a domain.""" @@ -534,6 +554,7 @@ def setUp(self): from service_identity.exceptions import CertificateError except ImportError: raise unittest.SkipTest("cryptography lib is too old") + self.tls_log_message = 'SSL connection certificate: issuer "/C=IE/O=Scrapy/CN=127.0.0.1", subject "/C=IE/O=Scrapy/CN=127.0.0.1"' super(Https11InvalidDNSPattern, self).setUp() From 42743fd9dd9d7116848fd3ad6b657453dd0b117d Mon Sep 17 00:00:00 2001 From: Andrey Rakhmatullin Date: Thu, 18 Jul 2019 20:49:25 +0500 Subject: [PATCH 11/19] Move tls_verbose_logging extraction from __init__ to from_settings. 
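
After this change the settings lookup happens only in from_settings(), and
__init__ takes a plain tls_verbose_logging flag, so a custom subclass no
longer needs a Settings object just to be constructed. The resulting pattern,
as a simplified sketch (not the literal code in the diff below):

    class ScrapyClientContextFactory(BrowserLikePolicyForHTTPS):

        def __init__(self, method=SSL.SSLv23_METHOD,
                     tls_verbose_logging=False, *args, **kwargs):
            super(ScrapyClientContextFactory, self).__init__(*args, **kwargs)
            self._ssl_method = method
            self.tls_verbose_logging = tls_verbose_logging

        @classmethod
        def from_settings(cls, settings, method=SSL.SSLv23_METHOD,
                          *args, **kwargs):
            # settings are resolved here, not in __init__
            verbose = settings.getbool('DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING')
            return cls(method=method, tls_verbose_logging=verbose,
                       *args, **kwargs)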
--- docs/topics/settings.rst | 4 ++-- scrapy/core/downloader/contextfactory.py | 13 +++++++------ scrapy/core/downloader/handlers/http11.py | 2 +- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/docs/topics/settings.rst b/docs/topics/settings.rst index 53c62467967..8705a524909 100644 --- a/docs/topics/settings.rst +++ b/docs/topics/settings.rst @@ -440,8 +440,8 @@ or even enable client-side authentication (and various other things). If you do use a custom ContextFactory, make sure its ``__init__`` method accepts a ``method`` parameter (this is the ``OpenSSL.SSL`` method mapping -:setting:`DOWNLOADER_CLIENT_TLS_METHOD`) and a ``settings`` parameter (this is -the Scrapy :class:`~scrapy.settings.Settings` object). +:setting:`DOWNLOADER_CLIENT_TLS_METHOD`) and a ``tls_verbose_logging`` +parameter (``bool``). .. setting:: DOWNLOADER_CLIENT_TLS_METHOD diff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py index 80c784f5af9..d5d238b9ce4 100644 --- a/scrapy/core/downloader/contextfactory.py +++ b/scrapy/core/downloader/contextfactory.py @@ -29,17 +29,18 @@ class ScrapyClientContextFactory(BrowserLikePolicyForHTTPS): understand the SSLv3, TLSv1, TLSv1.1 and TLSv1.2 protocols.' """ - def __init__(self, method=SSL.SSLv23_METHOD, settings=None, *args, **kwargs): + def __init__(self, method=SSL.SSLv23_METHOD, tls_verbose_logging=False, *args, **kwargs): super(ScrapyClientContextFactory, self).__init__(*args, **kwargs) self._ssl_method = method - if settings: - self.tls_verbose_logging = settings['DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING'] - else: - self.tls_verbose_logging = False + self.tls_verbose_logging = tls_verbose_logging @classmethod def from_settings(cls, settings, method=SSL.SSLv23_METHOD, *args, **kwargs): - return cls(method=method, settings=settings, *args, **kwargs) + if settings: + tls_verbose_logging = settings.getbool('DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING') + else: + tls_verbose_logging = False + return cls(method=method, tls_verbose_logging=tls_verbose_logging, *args, **kwargs) def getCertificateOptions(self): # setting verify=True will require you to provide CAs diff --git a/scrapy/core/downloader/handlers/http11.py b/scrapy/core/downloader/handlers/http11.py index 9b0c7977d0a..deb0f9d21f1 100644 --- a/scrapy/core/downloader/handlers/http11.py +++ b/scrapy/core/downloader/handlers/http11.py @@ -51,7 +51,7 @@ def __init__(self, settings): self._contextFactory = create_instance(self._contextFactoryClass, settings=settings, crawler=None) msg = """ '%s' does not accept `method` argument (type OpenSSL.SSL method,\ - e.g. OpenSSL.SSL.SSLv23_METHOD) and/or `settings` argument.\ + e.g. OpenSSL.SSL.SSLv23_METHOD) and/or `tls_verbose_logging` argument.\ Please upgrade your context factory class to handle them or ignore them.""" % ( settings['DOWNLOADER_CLIENTCONTEXTFACTORY'],) warnings.warn(msg) From 95dd2df7b5dc6836a784a2b373009b17ca2eb475 Mon Sep 17 00:00:00 2001 From: Andrey Rakhmatullin Date: Thu, 18 Jul 2019 20:51:26 +0500 Subject: [PATCH 12/19] Drop an unused import. 
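
create_instance was imported into contextfactory.py by the
DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING commit but never actually used in that
module (only the download handlers need it), so the import can simply go.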
--- scrapy/core/downloader/contextfactory.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py index d5d238b9ce4..188d9f91731 100644 --- a/scrapy/core/downloader/contextfactory.py +++ b/scrapy/core/downloader/contextfactory.py @@ -2,7 +2,6 @@ from twisted.internet.ssl import ClientContextFactory from scrapy import twisted_version -from scrapy.utils.misc import create_instance if twisted_version >= (14, 0, 0): From c6453800cd612297aa635636477c68a109a4a542 Mon Sep 17 00:00:00 2001 From: Andrey Rakhmatullin Date: Thu, 18 Jul 2019 22:17:39 +0500 Subject: [PATCH 13/19] Remove an unneeded if. --- scrapy/core/downloader/contextfactory.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py index 188d9f91731..5ac20c0bbb2 100644 --- a/scrapy/core/downloader/contextfactory.py +++ b/scrapy/core/downloader/contextfactory.py @@ -35,10 +35,7 @@ def __init__(self, method=SSL.SSLv23_METHOD, tls_verbose_logging=False, *args, * @classmethod def from_settings(cls, settings, method=SSL.SSLv23_METHOD, *args, **kwargs): - if settings: - tls_verbose_logging = settings.getbool('DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING') - else: - tls_verbose_logging = False + tls_verbose_logging = settings.getbool('DOWNLOADER_CLIENT_TLS_VERBOSE_LOGGING') return cls(method=method, tls_verbose_logging=tls_verbose_logging, *args, **kwargs) def getCertificateOptions(self): From b8a43011e75da4353b0d5ef314c96cb1276f12f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adri=C3=A1n=20Chaves?= Date: Thu, 18 Jul 2019 18:47:29 +0200 Subject: [PATCH 14/19] Cover Scrapy 1.7.1 in the release notes --- docs/news.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/news.rst b/docs/news.rst index 921089ccd96..a0f0c5697a9 100644 --- a/docs/news.rst +++ b/docs/news.rst @@ -6,6 +6,11 @@ Release notes .. note:: Scrapy 1.x will be the last series supporting Python 2. Scrapy 2.0, planned for Q4 2019 or Q1 2020, will support **Python 3 only**. +Scrapy 1.7.1 (2019-07-18) +------------------------- + +Re-packaging of Scrapy 1.7.0, which was missing some changes in PyPI. + .. _release-1.7.0: Scrapy 1.7.0 (2019-07-18) From 43d5b5a524ff2cce6fd4620f8e2460489da39f42 Mon Sep 17 00:00:00 2001 From: Kristobal Junta Date: Mon, 22 Jul 2019 10:19:08 +0300 Subject: [PATCH 15/19] fix default RETRY_HTTP_CODES value in docs --- docs/topics/downloader-middleware.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/topics/downloader-middleware.rst b/docs/topics/downloader-middleware.rst index 38a4fdb2593..a3780a177cc 100644 --- a/docs/topics/downloader-middleware.rst +++ b/docs/topics/downloader-middleware.rst @@ -963,7 +963,7 @@ precedence over the :setting:`RETRY_TIMES` setting. RETRY_HTTP_CODES ^^^^^^^^^^^^^^^^ -Default: ``[500, 502, 503, 504, 522, 524, 408]`` +Default: ``[500, 502, 503, 504, 522, 524, 408, 429]`` Which HTTP response codes to retry. Other errors (DNS lookup issues, connections lost, etc) are always retried. 
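
Note: this brings the documented default in line with the code default in
scrapy/settings/default_settings.py, which already includes 429 ("Too Many
Requests"); only the documentation was out of date. A project that prefers
the previous behaviour can still override the setting in its own settings.py,
e.g. (illustrative snippet only):

    RETRY_HTTP_CODES = [500, 502, 503, 504, 522, 524, 408]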
From 7e622af4e5b0f49c88101ad370941b33e4833e1e Mon Sep 17 00:00:00 2001 From: Eugenio Lacuesta Date: Mon, 22 Jul 2019 14:53:17 -0300 Subject: [PATCH 16/19] Fix ConfigParser import in py2 --- scrapy/utils/conf.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/scrapy/utils/conf.py b/scrapy/utils/conf.py index 26d66eaf893..fb7ca3310d6 100644 --- a/scrapy/utils/conf.py +++ b/scrapy/utils/conf.py @@ -1,10 +1,13 @@ import os import sys import numbers -import configparser from operator import itemgetter import six +if six.PY2: + from ConfigParser import SafeConfigParser as ConfigParser +else: + from configparser import ConfigParser from scrapy.settings import BaseSettings from scrapy.utils.deprecate import update_classpath @@ -94,7 +97,7 @@ def init_env(project='default', set_syspath=True): def get_config(use_closest=True): """Get Scrapy config file as a ConfigParser""" sources = get_sources(use_closest) - cfg = configparser.ConfigParser() + cfg = ConfigParser() cfg.read(sources) return cfg From 7843101f9abad302e6c9c997f9d2a7adff98380b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adri=C3=A1n=20Chaves?= Date: Tue, 23 Jul 2019 12:04:26 +0200 Subject: [PATCH 17/19] Cover Scrapy 1.7.2 in the release notes --- docs/news.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/news.rst b/docs/news.rst index a0f0c5697a9..d79844ed25f 100644 --- a/docs/news.rst +++ b/docs/news.rst @@ -6,6 +6,12 @@ Release notes .. note:: Scrapy 1.x will be the last series supporting Python 2. Scrapy 2.0, planned for Q4 2019 or Q1 2020, will support **Python 3 only**. +Scrapy 1.7.2 (2019-07-23) +------------------------- + +Fix Python 2 support (:issue:`3889`, :issue:`3893`, :issue:`3896`). + + Scrapy 1.7.1 (2019-07-18) ------------------------- From 7551689c75a1f2b4dbed72184f1dabab2f6c3c4a Mon Sep 17 00:00:00 2001 From: Lucy Wang Date: Fri, 26 Jul 2019 09:07:29 +0800 Subject: [PATCH 18/19] s3 file store should accept all supported headers --- scrapy/pipelines/files.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/scrapy/pipelines/files.py b/scrapy/pipelines/files.py index 2145e6d2b5e..ea06d2ae87e 100644 --- a/scrapy/pipelines/files.py +++ b/scrapy/pipelines/files.py @@ -189,6 +189,19 @@ def _headers_to_botocore_kwargs(self, headers): 'X-Amz-Grant-Read': 'GrantRead', 'X-Amz-Grant-Read-ACP': 'GrantReadACP', 'X-Amz-Grant-Write-ACP': 'GrantWriteACP', + 'X-Amz-Object-Lock-Legal-Hold': 'ObjectLockLegalHoldStatus', + 'X-Amz-Object-Lock-Mode': 'ObjectLockMode', + 'X-Amz-Object-Lock-Retain-Until-Date': 'ObjectLockRetainUntilDate', + 'X-Amz-Request-Payer': 'RequestPayer', + 'X-Amz-Server-Side-Encryption': 'ServerSideEncryption', + 'X-Amz-Server-Side-Encryption-Aws-Kms-Key-Id': 'SSEKMSKeyId', + 'X-Amz-Server-Side-Encryption-Context': 'SSEKMSEncryptionContext', + 'X-Amz-Server-Side-Encryption-Customer-Algorithm': 'SSECustomerAlgorithm', + 'X-Amz-Server-Side-Encryption-Customer-Key': 'SSECustomerKey', + 'X-Amz-Server-Side-Encryption-Customer-Key-Md5': 'SSECustomerKeyMD5', + 'X-Amz-Storage-Class': 'StorageClass', + 'X-Amz-Tagging': 'Tagging', + 'X-Amz-Website-Redirect-Location': 'WebsiteRedirectLocation', }) extra = {} for key, value in six.iteritems(headers): From dffd163b50e98c1272b2ded33645dec4cb7591ab Mon Sep 17 00:00:00 2001 From: Renne Rocha Date: Mon, 29 Jul 2019 19:07:34 -0300 Subject: [PATCH 19/19] Added constrain on lxml version based on Python version --- requirements-py3.txt | 3 ++- setup.py | 3 ++- tests/constraints.txt | 3 +-- 3 files changed, 5 insertions(+), 4 deletions(-) 
diff --git a/requirements-py3.txt b/requirements-py3.txt index 5a5d4c95af4..478ed0010dd 100644 --- a/requirements-py3.txt +++ b/requirements-py3.txt @@ -1,5 +1,6 @@ Twisted>=17.9.0 -lxml>=3.2.4 +lxml;python_version!="3.4" +lxml<=4.3.5;python_version=="3.4" pyOpenSSL>=0.13.1 cssselect>=0.9 queuelib>=1.1.1 diff --git a/setup.py b/setup.py index 4dc6d18c165..ee0aaabf0aa 100644 --- a/setup.py +++ b/setup.py @@ -69,7 +69,8 @@ def has_environment_marker_platform_impl_support(): 'Twisted>=13.1.0,<=19.2.0;python_version=="3.4"', 'w3lib>=1.17.0', 'queuelib', - 'lxml', + 'lxml;python_version!="3.4"', + 'lxml<=4.3.5;python_version=="3.4"', 'pyOpenSSL', 'cssselect>=0.9', 'six>=1.5.2', diff --git a/tests/constraints.txt b/tests/constraints.txt index e59e68b3f20..5655ac2d374 100644 --- a/tests/constraints.txt +++ b/tests/constraints.txt @@ -1,2 +1 @@ -Twisted!=18.4.0 -lxml!=4.2.2 \ No newline at end of file +Twisted!=18.4.0 \ No newline at end of file
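
Note on the constraints above: the ;python_version=="3.4" suffixes are PEP 508
environment markers, evaluated at install time, so only one of the two lxml
requirement lines applies to any given interpreter. A quick, illustrative way
to check how a marker evaluates (this assumes the third-party ``packaging``
library is available; it is not something this patch adds):

    from packaging.markers import Marker

    # True only when run on a Python 3.4 interpreter
    print(Marker('python_version == "3.4"').evaluate())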