Update requests and urllib3 (#464)
* requests: updated to version 2.27.1
* urllib3: updated to version 1.26.18
lib/requests/__init__.py
@@ -9,14 +9,14 @@
Requests HTTP Library
~~~~~~~~~~~~~~~~~~~~~

Requests is an HTTP library, written in Python, for human beings. Basic GET
usage:
Requests is an HTTP library, written in Python, for human beings.
Basic GET usage:

>>> import requests
>>> r = requests.get('https://www.python.org')
>>> r.status_code
200
>>> 'Python is a programming language' in r.content
>>> b'Python is a programming language' in r.content
True

... or POST:
@@ -27,26 +27,34 @@ usage:
{
...
"form": {
"key2": "value2",
"key1": "value1"
"key1": "value1",
"key2": "value2"
},
...
}

The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>.
is at <https://requests.readthedocs.io>.

:copyright: (c) 2017 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
"""

import urllib3
import chardet
import warnings
from .exceptions import RequestsDependencyWarning

try:
from charset_normalizer import __version__ as charset_normalizer_version
except ImportError:
charset_normalizer_version = None

def check_compatibility(urllib3_version, chardet_version):
try:
from chardet import __version__ as chardet_version
except ImportError:
chardet_version = None

def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
urllib3_version = urllib3_version.split('.')
assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git.

@@ -57,19 +65,24 @@ def check_compatibility(urllib3_version, chardet_version):
# Check urllib3 for compatibility.
major, minor, patch = urllib3_version # noqa: F811
major, minor, patch = int(major), int(minor), int(patch)
# urllib3 >= 1.21.1, <= 1.25
# urllib3 >= 1.21.1, <= 1.26
assert major == 1
assert minor >= 21
assert minor <= 25

# Check chardet for compatibility.
major, minor, patch = chardet_version.split('.')[:3]
major, minor, patch = int(major), int(minor), int(patch)
# chardet >= 3.0.2, < 3.1.0
assert major == 3
assert minor < 1
assert patch >= 2
assert minor <= 26

# Check charset_normalizer for compatibility.
if chardet_version:
major, minor, patch = chardet_version.split('.')[:3]
major, minor, patch = int(major), int(minor), int(patch)
# chardet_version >= 3.0.2, < 5.0.0
assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
elif charset_normalizer_version:
major, minor, patch = charset_normalizer_version.split('.')[:3]
major, minor, patch = int(major), int(minor), int(patch)
# charset_normalizer >= 2.0.0 < 3.0.0
assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)
else:
raise Exception("You need either charset_normalizer or chardet installed")

def _check_cryptography(cryptography_version):
# cryptography < 1.3.4
@@ -84,20 +97,28 @@ def _check_cryptography(cryptography_version):

# Check imported dependencies for compatibility.
try:
check_compatibility(urllib3.__version__, chardet.__version__)
check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)
except (AssertionError, ValueError):
warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
"version!".format(urllib3.__version__, chardet.__version__),
warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
"version!".format(urllib3.__version__, chardet_version, charset_normalizer_version),
RequestsDependencyWarning)

# Attempt to enable urllib3's SNI support, if possible
# Attempt to enable urllib3's fallback for SNI support
# if the standard library doesn't support SNI or the
# 'ssl' library isn't available.
try:
from urllib3.contrib import pyopenssl
pyopenssl.inject_into_urllib3()
try:
import ssl
except ImportError:
ssl = None

# Check cryptography version
from cryptography import __version__ as cryptography_version
_check_cryptography(cryptography_version)
if not getattr(ssl, "HAS_SNI", False):
from urllib3.contrib import pyopenssl
pyopenssl.inject_into_urllib3()

# Check cryptography version
from cryptography import __version__ as cryptography_version
_check_cryptography(cryptography_version)
except ImportError:
pass
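
For readers of the diff: the new check collapses the per-field asserts into ordered tuple comparisons. A minimal sketch of the behavior (not code from the commit):

    def in_range(version, lower, upper):
        # split '4.0.0' -> (4, 0, 0) and compare tuples lexicographically
        major, minor, patch = (int(x) for x in version.split('.')[:3])
        return lower <= (major, minor, patch) < upper

    assert in_range('4.0.0', (3, 0, 2), (5, 0, 0))      # chardet 4.x now passes
    assert not in_range('2.3.0', (3, 0, 2), (5, 0, 0))  # too old
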

@@ -118,7 +139,7 @@ from .status_codes import codes
from .exceptions import (
RequestException, Timeout, URLRequired,
TooManyRedirects, HTTPError, ConnectionError,
FileModeWarning, ConnectTimeout, ReadTimeout
FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError
)

# Set default logging handler to avoid "No handler found" warnings.

lib/requests/__version__.py
@@ -4,11 +4,11 @@

__title__ = 'requests'
__description__ = 'Python HTTP for Humans.'
__url__ = 'http://python-requests.org'
__version__ = '2.22.0'
__build__ = 0x022200
__url__ = 'https://requests.readthedocs.io'
__version__ = '2.27.1'
__build__ = 0x022701
__author__ = 'Kenneth Reitz'
__author_email__ = 'me@kennethreitz.org'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2019 Kenneth Reitz'
__copyright__ = 'Copyright 2022 Kenneth Reitz'
__cake__ = u'\u2728 \U0001f370 \u2728'

lib/requests/adapters.py
@@ -19,6 +19,7 @@ from urllib3.util.retry import Retry
from urllib3.exceptions import ClosedPoolError
from urllib3.exceptions import ConnectTimeoutError
from urllib3.exceptions import HTTPError as _HTTPError
from urllib3.exceptions import InvalidHeader as _InvalidHeader
from urllib3.exceptions import MaxRetryError
from urllib3.exceptions import NewConnectionError
from urllib3.exceptions import ProxyError as _ProxyError
@@ -37,7 +38,7 @@ from .structures import CaseInsensitiveDict
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
ProxyError, RetryError, InvalidSchema, InvalidProxyURL,
InvalidURL)
InvalidURL, InvalidHeader)
from .auth import _basic_auth_str

try:
@@ -457,9 +458,11 @@ class HTTPAdapter(BaseAdapter):
low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

try:
skip_host = 'Host' in request.headers
low_conn.putrequest(request.method,
url,
skip_accept_encoding=True)
skip_accept_encoding=True,
skip_host=skip_host)

for header, value in request.headers.items():
low_conn.putheader(header, value)
@@ -527,6 +530,8 @@ class HTTPAdapter(BaseAdapter):
raise SSLError(e, request=request)
elif isinstance(e, ReadTimeoutError):
raise ReadTimeout(e, request=request)
elif isinstance(e, _InvalidHeader):
raise InvalidHeader(e, request=request)
else:
raise


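The skip_host change means a caller-supplied Host header is honored on the chunked-upload path instead of being duplicated by httplib. A hedged sketch against a hypothetical local server:

    import requests

    def body():
        yield b'part1'   # a generator body forces the chunked code path
        yield b'part2'

    # 'Host' in request.headers -> putrequest(..., skip_host=True), so only
    # the virtual host below is sent (assumes a server on 127.0.0.1:8080):
    requests.post('http://127.0.0.1:8080/upload', data=body(),
                  headers={'Host': 'virtual.example'})
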
lib/requests/api.py
@@ -16,7 +16,7 @@ from . import sessions
def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`.

:param method: method for the new :class:`Request` object.
:param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary, list of tuples or bytes to send
in the query string for the :class:`Request`.
@@ -50,6 +50,7 @@ def request(method, url, **kwargs):

>>> import requests
>>> req = requests.request('GET', 'https://httpbin.org/get')
>>> req
<Response [200]>
"""

@@ -71,7 +72,6 @@ def get(url, params=None, **kwargs):
:rtype: requests.Response
"""

kwargs.setdefault('allow_redirects', True)
return request('get', url, params=params, **kwargs)


@@ -84,7 +84,6 @@ def options(url, **kwargs):
:rtype: requests.Response
"""

kwargs.setdefault('allow_redirects', True)
return request('options', url, **kwargs)


@@ -92,7 +91,9 @@ def head(url, **kwargs):
r"""Sends a HEAD request.

:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:param \*\*kwargs: Optional arguments that ``request`` takes. If
`allow_redirects` is not provided, it will be set to `False` (as
opposed to the default :meth:`request` behavior).
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""

lib/requests/auth.py
@@ -50,7 +50,7 @@ def _basic_auth_str(username, password):
"Non-string passwords will no longer be supported in Requests "
"3.0.0. Please convert the object you've passed in ({!r}) to "
"a string or bytes object in the near future to avoid "
"problems.".format(password),
"problems.".format(type(password)),
category=DeprecationWarning,
)
password = str(password)
@@ -239,7 +239,7 @@ class HTTPDigestAuth(AuthBase):
"""

# If response is not 4xx, do not auth
# See https://github.com/requests/requests/issues/3772
# See https://github.com/psf/requests/issues/3772
if not 400 <= r.status_code < 500:
self._thread_local.num_401_calls = 1
return r

lib/requests/compat.py
@@ -8,7 +8,10 @@ This module handles import compatibility issues between Python 2 and
Python 3.
"""

import chardet
try:
import chardet
except ImportError:
import charset_normalizer as chardet

import sys

@@ -25,8 +28,10 @@ is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)

has_simplejson = False
try:
import simplejson as json
has_simplejson = True
except ImportError:
import json

@@ -43,15 +48,16 @@ if is_py2:
import cookielib
from Cookie import Morsel
from StringIO import StringIO
# Keep OrderedDict for backwards compatibility.
from collections import Callable, Mapping, MutableMapping, OrderedDict


builtin_str = str
bytes = str
str = unicode
basestring = basestring
numeric_types = (int, long, float)
integer_types = (int, long)
JSONDecodeError = ValueError

elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
@@ -59,8 +65,13 @@ elif is_py3:
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO
# Keep OrderedDict for backwards compatibility.
from collections import OrderedDict
from collections.abc import Callable, Mapping, MutableMapping
if has_simplejson:
from simplejson import JSONDecodeError
else:
from json import JSONDecodeError

builtin_str = str
str = str

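A minimal sketch of how the new compat shim resolves JSONDecodeError on Python 3 (simplejson is preferred when present):

    try:
        import simplejson as json
        from simplejson import JSONDecodeError
    except ImportError:
        import json
        from json import JSONDecodeError

    try:
        json.loads('{not valid json}')
    except JSONDecodeError as e:
        print('caught:', e.msg)
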
lib/requests/exceptions.py
@@ -8,6 +8,8 @@ This module contains the set of Requests' exceptions.
"""
from urllib3.exceptions import HTTPError as BaseHTTPError

from .compat import JSONDecodeError as CompatJSONDecodeError


class RequestException(IOError):
"""There was an ambiguous exception that occurred while handling your
@@ -25,6 +27,14 @@ class RequestException(IOError):
super(RequestException, self).__init__(*args, **kwargs)


class InvalidJSONError(RequestException):
"""A JSON error occurred."""


class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
"""Couldn't decode the text into json"""


class HTTPError(RequestException):
"""An HTTP error occurred."""

@@ -70,11 +80,11 @@ class TooManyRedirects(RequestException):


class MissingSchema(RequestException, ValueError):
"""The URL schema (e.g. http or https) is missing."""
"""The URL scheme (e.g. http or https) is missing."""


class InvalidSchema(RequestException, ValueError):
"""See defaults.py for valid schemas."""
"""The URL scheme provided is either invalid or unsupported."""


class InvalidURL(RequestException, ValueError):
@@ -94,11 +104,11 @@ class ChunkedEncodingError(RequestException):


class ContentDecodingError(RequestException, BaseHTTPError):
"""Failed to decode response content"""
"""Failed to decode response content."""


class StreamConsumedError(RequestException, TypeError):
"""The content for this response was already consumed"""
"""The content for this response was already consumed."""


class RetryError(RequestException):
@@ -106,21 +116,18 @@ class RetryError(RequestException):


class UnrewindableBodyError(RequestException):
"""Requests encountered an error when trying to rewind a body"""
"""Requests encountered an error when trying to rewind a body."""

# Warnings


class RequestsWarning(Warning):
"""Base warning for Requests."""
pass


class FileModeWarning(RequestsWarning, DeprecationWarning):
"""A file was opened in text mode, but Requests determined its binary length."""
pass


class RequestsDependencyWarning(RequestsWarning):
"""An imported dependency doesn't match the expected version range."""
pass

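Because the new exception inherits from both bases, existing handlers keep working; a quick check (assuming simplejson is not installed, so the compat alias is json.JSONDecodeError):

    import json
    import requests

    assert issubclass(requests.exceptions.JSONDecodeError, json.JSONDecodeError)
    assert issubclass(requests.exceptions.JSONDecodeError,
                      requests.exceptions.RequestException)
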
lib/requests/help.py
@@ -8,10 +8,19 @@ import ssl

import idna
import urllib3
import chardet

from . import __version__ as requests_version

try:
import charset_normalizer
except ImportError:
charset_normalizer = None

try:
import chardet
except ImportError:
chardet = None

try:
from urllib3.contrib import pyopenssl
except ImportError:
@@ -71,7 +80,12 @@ def info():

implementation_info = _implementation()
urllib3_info = {'version': urllib3.__version__}
chardet_info = {'version': chardet.__version__}
charset_normalizer_info = {'version': None}
chardet_info = {'version': None}
if charset_normalizer:
charset_normalizer_info = {'version': charset_normalizer.__version__}
if chardet:
chardet_info = {'version': chardet.__version__}

pyopenssl_info = {
'version': None,
@@ -99,9 +113,11 @@ def info():
'implementation': implementation_info,
'system_ssl': system_ssl_info,
'using_pyopenssl': pyopenssl is not None,
'using_charset_normalizer': chardet is None,
'pyOpenSSL': pyopenssl_info,
'urllib3': urllib3_info,
'chardet': chardet_info,
'charset_normalizer': charset_normalizer_info,
'cryptography': cryptography_info,
'idna': idna_info,
'requests': {

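After the update, requests.help.info() reports both detectors; a short probe (key names taken from the hunk above):

    from requests import help as requests_help

    info = requests_help.info()
    print(info['using_charset_normalizer'])
    print(info['chardet'], info['charset_normalizer'])
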
lib/requests/models.py
@@ -12,7 +12,7 @@ import sys

# Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
# such as in Embedded Python. See https://github.com/requests/requests/issues/3578.
# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
import encodings.idna

from urllib3.fields import RequestField
@@ -29,7 +29,9 @@ from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError)
ContentDecodingError, ConnectionError, StreamConsumedError,
InvalidJSONError)
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
@@ -38,7 +40,7 @@ from .utils import (
from .compat import (
Callable, Mapping,
cookielib, urlunparse, urlsplit, urlencode, str, bytes,
is_py2, chardet, builtin_str, basestring)
is_py2, chardet, builtin_str, basestring, JSONDecodeError)
from .compat import json as complexjson
from .status_codes import codes

@@ -273,13 +275,16 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
containing the exact bytes that will be sent to the server.

Generated from either a :class:`Request <Request>` object or manually.
Instances are generated from a :class:`Request <Request>` object, and
should not be instantiated manually; doing so may produce undesirable
effects.

Usage::

>>> import requests
>>> req = requests.Request('GET', 'https://httpbin.org/get')
>>> r = req.prepare()
>>> r
<PreparedRequest [GET]>

>>> s = requests.Session()
@@ -358,7 +363,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
#: We're unable to blindly call unicode/str functions
#: as this will include the bytestring indicator (b'')
#: on python 3.x.
#: https://github.com/requests/requests/pull/2238
#: https://github.com/psf/requests/pull/2238
if isinstance(url, bytes):
url = url.decode('utf8')
else:
@@ -381,7 +386,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
raise InvalidURL(*e.args)

if not scheme:
error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
error = ("Invalid URL {0!r}: No scheme supplied. Perhaps you meant http://{0}?")
error = error.format(to_native_string(url, 'utf8'))

raise MissingSchema(error)
@@ -398,7 +403,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
host = self._get_idna_encoded_host(host)
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
elif host.startswith(u'*'):
elif host.startswith((u'*', u'.')):
raise InvalidURL('URL has an invalid label.')

# Carefully reconstruct the network location
@@ -463,7 +468,12 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
# urllib3 requires a bytes-like body. Python 2's json.dumps
# provides this natively, but Python 3 gives a Unicode string.
content_type = 'application/json'
body = complexjson.dumps(json)

try:
body = complexjson.dumps(json, allow_nan=False)
except ValueError as ve:
raise InvalidJSONError(ve, request=self)

if not isinstance(body, bytes):
body = body.encode('utf-8')

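With allow_nan=False, out-of-range floats now fail at prepare time; a small illustration (the URL is a placeholder, nothing is sent):

    import requests

    try:
        requests.Request('POST', 'https://example.invalid',
                         json={'x': float('nan')}).prepare()
    except requests.exceptions.InvalidJSONError as e:
        print('rejected:', e)
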
@@ -472,12 +482,12 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
not isinstance(data, (basestring, list, tuple, Mapping))
])

try:
length = super_len(data)
except (TypeError, AttributeError, UnsupportedOperation):
length = None

if is_stream:
try:
length = super_len(data)
except (TypeError, AttributeError, UnsupportedOperation):
length = None

body = data

if getattr(body, 'tell', None) is not None:
@@ -608,7 +618,7 @@ class Response(object):

#: File-like object representation of response (for advanced usage).
#: Use of ``raw`` requires that ``stream=True`` be set on the request.
# This requirement does not apply for use internally to Requests.
#: This requirement does not apply for use internally to Requests.
self.raw = None

#: Final URL location of Response.
@@ -723,7 +733,7 @@ class Response(object):

@property
def apparent_encoding(self):
"""The apparent encoding, provided by the chardet library."""
"""The apparent encoding, provided by the charset_normalizer or chardet libraries."""
return chardet.detect(self.content)['encoding']

def iter_content(self, chunk_size=1, decode_unicode=False):
@@ -837,7 +847,7 @@ class Response(object):
"""Content of the response, in unicode.

If Response.encoding is None, encoding will be guessed using
``chardet``.
``charset_normalizer`` or ``chardet``.

The encoding of the response content is determined based solely on HTTP
headers, following RFC 2616 to the letter. If you can take advantage of
@@ -874,13 +884,14 @@ class Response(object):
r"""Returns the json-encoded content of a response, if any.

:param \*\*kwargs: Optional arguments that ``json.loads`` takes.
:raises ValueError: If the response body does not contain valid json.
:raises requests.exceptions.JSONDecodeError: If the response body does not
contain valid json.
"""

if not self.encoding and self.content and len(self.content) > 3:
# No encoding set. JSON RFC 4627 section 3 states we should expect
# UTF-8, -16 or -32. Detect which one to use; If the detection or
# decoding fails, fall back to `self.text` (using chardet to make
# decoding fails, fall back to `self.text` (using charset_normalizer to make
# a best guess).
encoding = guess_json_utf(self.content)
if encoding is not None:
@@ -894,7 +905,16 @@ class Response(object):
# and the server didn't bother to tell us what codec *was*
# used.
pass
return complexjson.loads(self.text, **kwargs)

try:
return complexjson.loads(self.text, **kwargs)
except JSONDecodeError as e:
# Catch JSON-related errors and raise as requests.JSONDecodeError
# This aliases json.JSONDecodeError and simplejson.JSONDecodeError
if is_py2: # e is a ValueError
raise RequestsJSONDecodeError(e.message)
else:
raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)

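Callers can now catch a requests-specific decode error from Response.json(); a sketch (assuming httpbin.org is reachable and returns HTML here):

    import requests

    r = requests.get('https://httpbin.org/html')
    try:
        r.json()
    except requests.exceptions.JSONDecodeError as e:
        print('body was not JSON:', e.msg)
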
@property
def links(self):
@@ -915,7 +935,7 @@ class Response(object):
return l

def raise_for_status(self):
"""Raises stored :class:`HTTPError`, if one occurred."""
"""Raises :class:`HTTPError`, if one occurred."""

http_error_msg = ''
if isinstance(self.reason, bytes):

lib/requests/packages.py
@@ -1,9 +1,17 @@
import sys

try:
import chardet
except ImportError:
import charset_normalizer as chardet
import warnings

warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer')

# This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :)

for package in ('urllib3', 'idna', 'chardet'):
for package in ('urllib3', 'idna'):
locals()[package] = __import__(package)
# This traversal is apparently necessary such that the identities are
# preserved (requests.packages.urllib3.* is urllib3.*)
@@ -11,4 +19,8 @@ for package in ('urllib3', 'idna', 'chardet'):
if mod == package or mod.startswith(package + '.'):
sys.modules['requests.packages.' + mod] = sys.modules[mod]

target = chardet.__name__
for mod in list(sys.modules):
if mod == target or mod.startswith(target + '.'):
sys.modules['requests.packages.' + target.replace(target, 'chardet')] = sys.modules[mod]
# Kinda cool, though, right?

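The aliasing keeps the legacy import paths pointing at the real modules; a quick identity check (the chardet line assumes chardet rather than charset_normalizer is installed):

    import requests.packages
    import urllib3

    assert requests.packages.urllib3 is urllib3

    import chardet
    assert requests.packages.chardet is chardet
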
lib/requests/sessions.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-

"""
requests.session
~~~~~~~~~~~~~~~~
requests.sessions
~~~~~~~~~~~~~~~~~

This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
@@ -11,9 +11,10 @@ import os
import sys
import time
from datetime import timedelta
from collections import OrderedDict

from .auth import _basic_auth_str
from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse, Mapping
from .compat import cookielib, is_py3, urljoin, urlparse, Mapping
from .cookies import (
cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
@@ -28,7 +29,7 @@ from .adapters import HTTPAdapter

from .utils import (
requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
get_auth_from_url, rewind_body
get_auth_from_url, rewind_body, resolve_proxies
)

from .status_codes import codes
@@ -162,7 +163,7 @@ class SessionRedirectMixin(object):
resp.raw.read(decode_content=False)

if len(resp.history) >= self.max_redirects:
raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp)
raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp)

# Release the connection back into the pool.
resp.close()
@@ -170,7 +171,7 @@ class SessionRedirectMixin(object):
# Handle redirection without scheme (see: RFC 1808 Section 4)
if url.startswith('//'):
parsed_rurl = urlparse(resp.url)
url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url)
url = ':'.join([to_native_string(parsed_rurl.scheme), url])

# Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
parsed = urlparse(url)
@@ -192,19 +193,16 @@ class SessionRedirectMixin(object):

self.rebuild_method(prepared_request, resp)

# https://github.com/requests/requests/issues/1084
# https://github.com/psf/requests/issues/1084
if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
# https://github.com/requests/requests/issues/3490
# https://github.com/psf/requests/issues/3490
purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')
for header in purged_headers:
prepared_request.headers.pop(header, None)
prepared_request.body = None

headers = prepared_request.headers
try:
del headers['Cookie']
except KeyError:
pass
headers.pop('Cookie', None)

# Extract any cookies sent on the response to the cookiejar
# in the new request. Because we've mutated our copied prepared
@@ -271,8 +269,6 @@ class SessionRedirectMixin(object):
if new_auth is not None:
prepared_request.prepare_auth(new_auth)

return

def rebuild_proxies(self, prepared_request, proxies):
"""This method re-evaluates the proxy configuration by considering the
environment variables. If we are redirected to a URL covered by
@@ -285,21 +281,9 @@ class SessionRedirectMixin(object):

:rtype: dict
"""
proxies = proxies if proxies is not None else {}
headers = prepared_request.headers
url = prepared_request.url
scheme = urlparse(url).scheme
new_proxies = proxies.copy()
no_proxy = proxies.get('no_proxy')

bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)
if self.trust_env and not bypass_proxy:
environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)

proxy = environ_proxies.get(scheme, environ_proxies.get('all'))

if proxy:
new_proxies.setdefault(scheme, proxy)
scheme = urlparse(prepared_request.url).scheme
new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)

if 'Proxy-Authorization' in headers:
del headers['Proxy-Authorization']
@@ -352,13 +336,13 @@ class Session(SessionRedirectMixin):
Or as a context manager::

>>> with requests.Session() as s:
>>> s.get('https://httpbin.org/get')
... s.get('https://httpbin.org/get')
<Response [200]>
"""

__attrs__ = [
'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
'cert', 'adapters', 'stream', 'trust_env',
'max_redirects',
]

@@ -390,6 +374,13 @@ class Session(SessionRedirectMixin):
self.stream = False

#: SSL Verification default.
#: Defaults to `True`, requiring requests to verify the TLS certificate at the
#: remote end.
#: If verify is set to `False`, requests will accept any TLS certificate
#: presented by the server, and will ignore hostname mismatches and/or
#: expired certificates, which will make your application vulnerable to
#: man-in-the-middle (MitM) attacks.
#: Only set this to `False` for testing.
self.verify = True

#: SSL client certificate default, if String, path to ssl client
@@ -498,7 +489,12 @@ class Session(SessionRedirectMixin):
content. Defaults to ``False``.
:param verify: (optional) Either a boolean, in which case it controls whether we verify
the server's TLS certificate, or a string, in which case it must be a path
to a CA bundle to use. Defaults to ``True``.
to a CA bundle to use. Defaults to ``True``. When set to
``False``, requests will accept any TLS certificate presented by
the server, and will ignore hostname mismatches and/or expired
certificates, which will make your application vulnerable to
man-in-the-middle (MitM) attacks. Setting verify to ``False``
may be useful during local development or testing.
:param cert: (optional) if String, path to ssl client cert file (.pem).
If Tuple, ('cert', 'key') pair.
:rtype: requests.Response
@@ -624,7 +620,10 @@ class Session(SessionRedirectMixin):
kwargs.setdefault('stream', self.stream)
kwargs.setdefault('verify', self.verify)
kwargs.setdefault('cert', self.cert)
kwargs.setdefault('proxies', self.proxies)
if 'proxies' not in kwargs:
kwargs['proxies'] = resolve_proxies(
request, self.proxies, self.trust_env
)

# It's possible that users might accidentally send a Request object.
# Guard against that specific failure case.
@@ -661,11 +660,13 @@ class Session(SessionRedirectMixin):

extract_cookies_to_jar(self.cookies, request, r.raw)

# Redirect resolving generator.
gen = self.resolve_redirects(r, request, **kwargs)

# Resolve redirects if allowed.
history = [resp for resp in gen] if allow_redirects else []
if allow_redirects:
# Redirect resolving generator.
gen = self.resolve_redirects(r, request, **kwargs)
history = [resp for resp in gen]
else:
history = []

# Shuffle things around if there's history.
if history:
@@ -728,7 +729,7 @@ class Session(SessionRedirectMixin):
return adapter

# Nothing matches :-/
raise InvalidSchema("No connection adapters were found for '%s'" % url)
raise InvalidSchema("No connection adapters were found for {!r}".format(url))

def close(self):
"""Closes all adapters and as such the session"""

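The corrected doctest corresponds to ordinary context-manager use (assuming httpbin.org is reachable):

    import requests

    with requests.Session() as s:
        r = s.get('https://httpbin.org/get')
        print(r.status_code)  # 200
    # adapters and pooled connections are closed on exit
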
lib/requests/status_codes.py
@@ -5,12 +5,15 @@ The ``codes`` object defines a mapping from common names for HTTP statuses
to their numerical codes, accessible either as attributes or as dictionary
items.

>>> requests.codes['temporary_redirect']
307
>>> requests.codes.teapot
418
>>> requests.codes['\o/']
200
Example::

>>> import requests
>>> requests.codes['temporary_redirect']
307
>>> requests.codes.teapot
418
>>> requests.codes['\o/']
200

Some codes have multiple names, and both upper- and lower-case versions of
the names are allowed. For example, ``codes.ok``, ``codes.OK``, and

lib/requests/structures.py
@@ -7,7 +7,9 @@ requests.structures
Data structures that power Requests.
"""

from .compat import OrderedDict, Mapping, MutableMapping
from collections import OrderedDict

from .compat import Mapping, MutableMapping


class CaseInsensitiveDict(MutableMapping):

lib/requests/utils.py
@@ -19,6 +19,9 @@ import sys
import tempfile
import warnings
import zipfile
from collections import OrderedDict
from urllib3.util import make_headers
from urllib3.util import parse_url

from .__version__ import __version__
from . import certs
@@ -26,7 +29,7 @@ from . import certs
from ._internal_utils import to_native_string
from .compat import parse_http_list as _parse_list_header
from .compat import (
quote, urlparse, bytes, str, OrderedDict, unquote, getproxies,
quote, urlparse, bytes, str, unquote, getproxies,
proxy_bypass, urlunparse, basestring, integer_types, is_py3,
proxy_bypass_environment, getproxies_environment, Mapping)
from .cookies import cookiejar_from_dict
@@ -40,6 +43,11 @@ DEFAULT_CA_BUNDLE_PATH = certs.where()

DEFAULT_PORTS = {'http': 80, 'https': 443}

# Ensure that ', ' is used to preserve previous delimiter behavior.
DEFAULT_ACCEPT_ENCODING = ", ".join(
re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
)


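The re.split/join dance exists because urllib3 1.26 emits 'gzip,deflate' (plus ',br' when Brotli is available) without spaces; a short demonstration:

    import re
    from urllib3.util import make_headers

    raw = make_headers(accept_encoding=True)['accept-encoding']
    print(raw)                                  # e.g. 'gzip,deflate'
    print(', '.join(re.split(r',\s*', raw)))    # 'gzip, deflate'
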
if sys.platform == 'win32':
# provide a proxy_bypass version on Windows without DNS lookups
@@ -117,7 +125,10 @@ def super_len(o):
elif hasattr(o, 'fileno'):
try:
fileno = o.fileno()
except io.UnsupportedOperation:
except (io.UnsupportedOperation, AttributeError):
# AttributeError is a surprising exception, seeing as how we've just checked
# that `hasattr(o, 'fileno')`. It happens for objects obtained via
# `Tarfile.extractfile()`, per issue 5229.
pass
else:
total_length = os.fstat(fileno).st_size
@@ -147,7 +158,7 @@ def super_len(o):
current_position = total_length
else:
if hasattr(o, 'seek') and total_length is None:
# StringIO and BytesIO have seek but no useable fileno
# StringIO and BytesIO have seek but no usable fileno
try:
# seek to end of file
o.seek(0, 2)
@@ -168,18 +179,24 @@ def super_len(o):
def get_netrc_auth(url, raise_errors=False):
"""Returns the Requests tuple auth for a given url from netrc."""

netrc_file = os.environ.get('NETRC')
if netrc_file is not None:
netrc_locations = (netrc_file,)
else:
netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)

try:
from netrc import netrc, NetrcParseError

netrc_path = None

for f in NETRC_FILES:
for f in netrc_locations:
try:
loc = os.path.expanduser('~/{}'.format(f))
loc = os.path.expanduser(f)
except KeyError:
# os.path.expanduser can fail when $HOME is undefined and
# getpwuid fails. See https://bugs.python.org/issue20164 &
# https://github.com/requests/requests/issues/1846
# https://github.com/psf/requests/issues/1846
return

if os.path.exists(loc):
@@ -211,7 +228,7 @@ def get_netrc_auth(url, raise_errors=False):
if raise_errors:
raise

# AppEngine hackiness.
# App Engine hackiness.
except (ImportError, AttributeError):
pass

@@ -238,6 +255,10 @@ def extract_zipped_paths(path):
archive, member = os.path.split(path)
while archive and not os.path.exists(archive):
archive, prefix = os.path.split(archive)
if not prefix:
# If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),
# we _can_ end up in an infinite loop on a rare corner case affecting a small number of users
break
member = '/'.join([prefix, member])

if not zipfile.is_zipfile(archive):
@@ -249,13 +270,28 @@ def extract_zipped_paths(path):

# we have a valid zip archive and a valid member of that archive
tmp = tempfile.gettempdir()
extracted_path = os.path.join(tmp, *member.split('/'))
extracted_path = os.path.join(tmp, member.split('/')[-1])
if not os.path.exists(extracted_path):
extracted_path = zip_file.extract(member, path=tmp)

# use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition
with atomic_open(extracted_path) as file_handler:
file_handler.write(zip_file.read(member))
return extracted_path


@contextlib.contextmanager
def atomic_open(filename):
"""Write a file to the disk in an atomic fashion"""
replacer = os.rename if sys.version_info[0] == 2 else os.replace
tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))
try:
with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:
yield tmp_handler
replacer(tmp_name, filename)
except BaseException:
os.remove(tmp_name)
raise
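
Usage sketch for the new helper (the filename is illustrative): the write goes to a temp file in the same directory and is os.replace()'d in, so readers never see a half-written file:

    from requests.utils import atomic_open

    with atomic_open('settings.json') as fh:
        fh.write(b'{"ok": true}')
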


def from_key_val_list(value):
"""Take an object and test to see if it can be represented as a
dictionary. Unless it can not be represented as such, return an
@@ -266,6 +302,8 @@ def from_key_val_list(value):
>>> from_key_val_list([('key', 'val')])
OrderedDict([('key', 'val')])
>>> from_key_val_list('string')
Traceback (most recent call last):
...
ValueError: cannot encode objects that are not 2-tuples
>>> from_key_val_list({'key': 'val'})
OrderedDict([('key', 'val')])
@@ -292,7 +330,9 @@ def to_key_val_list(value):
>>> to_key_val_list({'key': 'val'})
[('key', 'val')]
>>> to_key_val_list('string')
ValueError: cannot encode objects that are not 2-tuples.
Traceback (most recent call last):
...
ValueError: cannot encode objects that are not 2-tuples

:rtype: list
"""
@@ -492,6 +532,10 @@ def get_encoding_from_headers(headers):
if 'text' in content_type:
return 'ISO-8859-1'

if 'application/json' in content_type:
# Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset
return 'utf-8'


def stream_decode_response_unicode(iterator, r):
"""Stream decodes a iterator."""
@@ -790,6 +834,33 @@ def select_proxy(url, proxies):
return proxy


def resolve_proxies(request, proxies, trust_env=True):
"""This method takes proxy information from a request and configuration
input to resolve a mapping of target proxies. This will consider settings
such a NO_PROXY to strip proxy configurations.

:param request: Request or PreparedRequest
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
:param trust_env: Boolean declaring whether to trust environment configs

:rtype: dict
"""
proxies = proxies if proxies is not None else {}
url = request.url
scheme = urlparse(url).scheme
no_proxy = proxies.get('no_proxy')
new_proxies = proxies.copy()

if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)

proxy = environ_proxies.get(scheme, environ_proxies.get('all'))

if proxy:
new_proxies.setdefault(scheme, proxy)
return new_proxies
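
A usage sketch for the extracted helper (the hostname is illustrative); with trust_env=True it also consults HTTP_PROXY/NO_PROXY-style environment variables:

    import requests
    from requests.utils import resolve_proxies

    req = requests.Request('GET', 'http://internal.example').prepare()
    print(resolve_proxies(req, {'no_proxy': 'internal.example'}, trust_env=True))
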


def default_user_agent(name="python-requests"):
"""
Return a string representing the default user agent.
@@ -805,7 +876,7 @@ def default_headers():
"""
return CaseInsensitiveDict({
'User-Agent': default_user_agent(),
'Accept-Encoding': ', '.join(('gzip', 'deflate')),
'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,
'Accept': '*/*',
'Connection': 'keep-alive',
})
@@ -892,15 +963,27 @@ def prepend_scheme_if_needed(url, new_scheme):

:rtype: str
"""
scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)
parsed = parse_url(url)
scheme, auth, host, port, path, query, fragment = parsed

# urlparse is a finicky beast, and sometimes decides that there isn't a
# netloc present. Assume that it's being over-cautious, and switch netloc
# and path if urlparse decided there was no netloc.
# A defect in urlparse determines that there isn't a netloc present in some
# urls. We previously assumed parsing was overly cautious, and swapped the
# netloc and path. Due to a lack of tests on the original defect, this is
# maintained with parse_url for backwards compatibility.
netloc = parsed.netloc
if not netloc:
netloc, path = path, netloc

return urlunparse((scheme, netloc, path, params, query, fragment))
if auth:
# parse_url doesn't provide the netloc with auth
# so we'll add it ourselves.
netloc = '@'.join([auth, netloc])
if scheme is None:
scheme = new_scheme
if path is None:
path = ''

return urlunparse((scheme, netloc, path, '', query, fragment))


def get_auth_from_url(url):

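The parse_url-based version behaves like this for the common cases:

    from requests.utils import prepend_scheme_if_needed

    print(prepend_scheme_if_needed('example.com/path', 'http'))    # http://example.com/path
    print(prepend_scheme_if_needed('https://example.com', 'http')) # unchanged
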
lib/urllib3/__init__.py
@@ -1,28 +1,44 @@
"""
urllib3 - Thread-safe connection pooling and re-using.
Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
"""
from __future__ import absolute_import
import warnings

from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
# Set default logging handler to avoid "No handler found" warnings.
import logging
import warnings
from logging import NullHandler

from . import exceptions
from ._version import __version__
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util.request import make_headers
from .util.url import get_host
from .util.timeout import Timeout
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host


# Set default logging handler to avoid "No handler found" warnings.
import logging
from logging import NullHandler
# === NOTE TO REPACKAGERS AND VENDORS ===
# Please delete this block, this logic is only
# for urllib3 being distributed via PyPI.
# See: https://github.com/urllib3/urllib3/issues/2680
try:
import urllib3_secure_extra # type: ignore # noqa: F401
except ImportError:
pass
else:
warnings.warn(
"'urllib3[secure]' extra is deprecated and will be removed "
"in a future release of urllib3 2.x. Read more in this issue: "
"https://github.com/urllib3/urllib3/issues/2680",
category=DeprecationWarning,
stacklevel=2,
)

__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = "1.25.6"
__version__ = __version__

__all__ = (
"HTTPConnectionPool",

lib/urllib3/_collections.py
@@ -17,9 +17,10 @@ except ImportError: # Platform-specific: No threads available


from collections import OrderedDict
from .exceptions import InvalidHeader
from .packages.six import iterkeys, itervalues, PY3

from .exceptions import InvalidHeader
from .packages import six
from .packages.six import iterkeys, itervalues

__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]

@@ -174,7 +175,7 @@ class HTTPHeaderDict(MutableMapping):
def __ne__(self, other):
return not self.__eq__(other)

if not PY3: # Python 2
if six.PY2: # Python 2
iterkeys = MutableMapping.iterkeys
itervalues = MutableMapping.itervalues

@@ -190,7 +191,7 @@ class HTTPHeaderDict(MutableMapping):

def pop(self, key, default=__marker):
"""D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
If key is not found, d is returned if given, otherwise KeyError is raised.
"""
# Using the MutableMapping function directly fails due to the private marker.
# Using ordinary dict.pop would expose the internal structures.
@@ -267,6 +268,24 @@ class HTTPHeaderDict(MutableMapping):
else:
return vals[1:]

def _prepare_for_method_change(self):
"""
Remove content-specific header fields before changing the request
method to GET or HEAD according to RFC 9110, Section 15.4.
"""
content_specific_headers = [
"Content-Encoding",
"Content-Language",
"Content-Location",
"Content-Type",
"Content-Length",
"Digest",
"Last-Modified",
]
for header in content_specific_headers:
self.discard(header)
return self

# Backwards compatibility for httplib
getheaders = getlist
getallmatchingheaders = getlist

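The new hook strips content-specific fields when a redirect downgrades the method; a minimal demonstration against the private API:

    from urllib3._collections import HTTPHeaderDict

    h = HTTPHeaderDict({'Content-Type': 'application/json', 'Accept': '*/*'})
    h._prepare_for_method_change()
    print(list(h))  # ['Accept']
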
lib/urllib3/_version.py (new file)
@@ -0,0 +1,2 @@
# This file is protected via CODEOWNERS
__version__ = "1.26.18"
lib/urllib3/connection.py
@@ -1,13 +1,18 @@
from __future__ import absolute_import

import datetime
import logging
import os
import re
import socket
from socket import error as SocketError, timeout as SocketTimeout
import warnings
from socket import error as SocketError
from socket import timeout as SocketTimeout

from .packages import six
from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
from .packages.six.moves.http_client import HTTPException # noqa: F401
from .util.proxy import create_proxy_ssl_context

try: # Compiled with SSL?
import ssl
@@ -29,26 +34,33 @@ except NameError:
pass


try: # Python 3:
# Not a no-op, we're adding this to the namespace so it can be imported.
BrokenPipeError = BrokenPipeError
except NameError: # Python 2:

class BrokenPipeError(Exception):
pass


from ._collections import HTTPHeaderDict # noqa (historical, removed in v2)
from ._version import __version__
from .exceptions import (
NewConnectionError,
ConnectTimeoutError,
NewConnectionError,
SubjectAltNameWarning,
SystemTimeWarning,
)
from .packages.ssl_match_hostname import match_hostname, CertificateError

from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
from .util.ssl_ import (
resolve_cert_reqs,
resolve_ssl_version,
assert_fingerprint,
create_urllib3_context,
is_ipaddress,
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
)


from .util import connection

from ._collections import HTTPHeaderDict
from .util.ssl_match_hostname import CertificateError, match_hostname

log = logging.getLogger(__name__)

@@ -56,37 +68,35 @@ port_by_scheme = {"http": 80, "https": 443}

# When it comes time to update this value as a part of regular maintenance
# (ie test_recent_date is failing) update it to ~6 months before the current date.
RECENT_DATE = datetime.date(2019, 1, 1)
RECENT_DATE = datetime.date(2022, 1, 1)


class DummyConnection(object):
"""Used to detect a failed ConnectionCls import."""

pass
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")


class HTTPConnection(_HTTPConnection, object):
"""
Based on httplib.HTTPConnection but provides an extra constructor
Based on :class:`http.client.HTTPConnection` but provides an extra constructor
backwards-compatibility layer between older and newer Pythons.

Additional keyword parameters are used to configure attributes of the connection.
Accepted parameters include:

- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- ``source_address``: Set the source address for the current connection.
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- ``source_address``: Set the source address for the current connection.
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.

For example, if you wish to enable TCP Keep Alive in addition to the defaults,
you might pass::
For example, if you wish to enable TCP Keep Alive in addition to the defaults,
you might pass:

HTTPConnection.default_socket_options + [
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
.. code-block:: python

Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
HTTPConnection.default_socket_options + [
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]

Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
"""

default_port = port_by_scheme["http"]
@@ -98,6 +108,10 @@ class HTTPConnection(_HTTPConnection, object):
#: Whether this connection verifies the host's certificate.
is_verified = False

#: Whether this proxy connection (if used) verifies the proxy host's
#: certificate.
proxy_is_verified = None

def __init__(self, *args, **kw):
if not six.PY2:
kw.pop("strict", None)
@@ -109,6 +123,10 @@ class HTTPConnection(_HTTPConnection, object):
#: provided, we use the default options.
self.socket_options = kw.pop("socket_options", self.default_socket_options)

# Proxy options provided by the user.
self.proxy = kw.pop("proxy", None)
self.proxy_config = kw.pop("proxy_config", None)

_HTTPConnection.__init__(self, *args, **kw)

@property
@@ -141,7 +159,7 @@ class HTTPConnection(_HTTPConnection, object):
self._dns_host = value

def _new_conn(self):
""" Establish a socket connection and set nodelay settings on it.
"""Establish a socket connection and set nodelay settings on it.

:return: New socket connection.
"""
@@ -171,10 +189,13 @@ class HTTPConnection(_HTTPConnection, object):

return conn

def _is_using_tunnel(self):
# Google App Engine's httplib does not define _tunnel_host
return getattr(self, "_tunnel_host", None)

def _prepare_conn(self, conn):
self.sock = conn
# Google App Engine's httplib does not define _tunnel_host
if getattr(self, "_tunnel_host", None):
if self._is_using_tunnel():
# TODO: Fix tunnel so it doesn't depend on self.sock state.
self._tunnel()
# Mark this connection as not reusable
@@ -184,20 +205,61 @@ class HTTPConnection(_HTTPConnection, object):
conn = self._new_conn()
self._prepare_conn(conn)

def putrequest(self, method, url, *args, **kwargs):
""" """
# Empty docstring because the indentation of CPython's implementation
# is broken but we don't want this method in our documentation.
match = _CONTAINS_CONTROL_CHAR_RE.search(method)
if match:
raise ValueError(
"Method cannot contain non-token characters %r (found at least %r)"
% (method, match.group())
)

return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)

def putheader(self, header, *values):
""" """
if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
_HTTPConnection.putheader(self, header, *values)
elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
raise ValueError(
"urllib3.util.SKIP_HEADER only supports '%s'"
% ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
)

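The control-character guard rejects request-smuggling attempts before anything touches the socket; a short check:

    from urllib3.connection import HTTPConnection

    conn = HTTPConnection('example.com', 80)
    try:
        conn.putrequest('GET\r\nX-Injected: 1', '/')  # buffered only, no I/O yet
    except ValueError as e:
        print(e)
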
def request(self, method, url, body=None, headers=None):
# Update the inner socket's timeout value to send the request.
# This only triggers if the connection is re-used.
if getattr(self, "sock", None) is not None:
self.sock.settimeout(self.timeout)

if headers is None:
headers = {}
else:
# Avoid modifying the headers passed into .request()
headers = headers.copy()
if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
headers["User-Agent"] = _get_default_user_agent()
super(HTTPConnection, self).request(method, url, body=body, headers=headers)

def request_chunked(self, method, url, body=None, headers=None):
"""
Alternative to the common request method, which sends the
body with chunked encoding and not as one block
"""
headers = HTTPHeaderDict(headers if headers is not None else {})
skip_accept_encoding = "accept-encoding" in headers
skip_host = "host" in headers
headers = headers or {}
header_keys = set([six.ensure_str(k.lower()) for k in headers])
skip_accept_encoding = "accept-encoding" in header_keys
skip_host = "host" in header_keys
self.putrequest(
method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
)
if "user-agent" not in header_keys:
self.putheader("User-Agent", _get_default_user_agent())
for header, value in headers.items():
self.putheader(header, value)
if "transfer-encoding" not in headers:
if "transfer-encoding" not in header_keys:
self.putheader("Transfer-Encoding", "chunked")
self.endheaders()

@@ -211,19 +273,31 @@ class HTTPConnection(_HTTPConnection, object):
if not isinstance(chunk, bytes):
chunk = chunk.encode("utf8")
len_str = hex(len(chunk))[2:]
self.send(len_str.encode("utf-8"))
self.send(b"\r\n")
self.send(chunk)
self.send(b"\r\n")
to_send = bytearray(len_str.encode())
to_send += b"\r\n"
to_send += chunk
to_send += b"\r\n"
self.send(to_send)

# After the if clause, to always have a closed body
self.send(b"0\r\n\r\n")


class HTTPSConnection(HTTPConnection):
|
||||
"""
|
||||
Many of the parameters to this constructor are passed to the underlying SSL
|
||||
socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
|
||||
"""
|
||||
|
||||
default_port = port_by_scheme["https"]
|
||||
|
||||
cert_reqs = None
|
||||
ca_certs = None
|
||||
ca_cert_dir = None
|
||||
ca_cert_data = None
|
||||
ssl_version = None
|
||||
assert_fingerprint = None
|
||||
tls_in_tls_required = False
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -251,53 +325,6 @@ class HTTPSConnection(HTTPConnection):
|
||||
# HTTPS requests to go out as HTTP. (See Issue #356)
|
||||
self._protocol = "https"
|
||||
|
||||
def connect(self):
|
||||
conn = self._new_conn()
|
||||
self._prepare_conn(conn)
|
||||
|
||||
# Wrap socket using verification with the root certs in
|
||||
# trusted_root_certs
|
||||
default_ssl_context = False
|
||||
if self.ssl_context is None:
|
||||
default_ssl_context = True
|
||||
self.ssl_context = create_urllib3_context(
|
||||
ssl_version=resolve_ssl_version(self.ssl_version),
|
||||
cert_reqs=resolve_cert_reqs(self.cert_reqs),
|
||||
)
|
||||
|
||||
# Try to load OS default certs if none are given.
|
||||
# Works well on Windows (requires Python3.4+)
|
||||
context = self.ssl_context
|
||||
if (
|
||||
not self.ca_certs
|
||||
and not self.ca_cert_dir
|
||||
and default_ssl_context
|
||||
and hasattr(context, "load_default_certs")
|
||||
):
|
||||
context.load_default_certs()
|
||||
|
||||
self.sock = ssl_wrap_socket(
|
||||
sock=conn,
|
||||
keyfile=self.key_file,
|
||||
certfile=self.cert_file,
|
||||
key_password=self.key_password,
|
||||
ssl_context=self.ssl_context,
|
||||
server_hostname=self.server_hostname,
|
||||
)
|
||||
|
||||
|
||||
class VerifiedHTTPSConnection(HTTPSConnection):
|
||||
"""
|
||||
Based on httplib.HTTPSConnection but wraps the socket with
|
||||
SSL certification.
|
||||
"""
|
||||
|
||||
cert_reqs = None
|
||||
ca_certs = None
|
||||
ca_cert_dir = None
|
||||
ssl_version = None
|
||||
assert_fingerprint = None
|
||||
|
||||
def set_cert(
|
||||
self,
|
||||
key_file=None,
|
||||
@@ -308,6 +335,7 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
||||
assert_hostname=None,
|
||||
assert_fingerprint=None,
|
||||
ca_cert_dir=None,
|
||||
ca_cert_data=None,
|
||||
):
|
||||
"""
|
||||
This method should only be called once, before the connection is used.
|
||||
@@ -328,15 +356,19 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
||||
self.assert_fingerprint = assert_fingerprint
|
||||
self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
|
||||
self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
|
||||
self.ca_cert_data = ca_cert_data
|
||||
|
||||
def connect(self):
|
||||
# Add certificate verification
|
||||
conn = self._new_conn()
|
||||
self.sock = conn = self._new_conn()
|
||||
hostname = self.host
|
||||
tls_in_tls = False
|
||||
|
||||
if self._is_using_tunnel():
|
||||
if self.tls_in_tls_required:
|
||||
self.sock = conn = self._connect_tls_proxy(hostname, conn)
|
||||
tls_in_tls = True
|
||||
|
||||
# Google App Engine's httplib does not define _tunnel_host
|
||||
if getattr(self, "_tunnel_host", None):
|
||||
self.sock = conn
|
||||
# Calls self._set_hostport(), so self.host is
|
||||
# self._tunnel_host below.
|
||||
self._tunnel()
|
||||
@@ -378,6 +410,7 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
||||
if (
|
||||
not self.ca_certs
|
||||
and not self.ca_cert_dir
|
||||
and not self.ca_cert_data
|
||||
and default_ssl_context
|
||||
and hasattr(context, "load_default_certs")
|
||||
):
|
||||
@@ -390,10 +423,29 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
||||
key_password=self.key_password,
|
||||
ca_certs=self.ca_certs,
|
||||
ca_cert_dir=self.ca_cert_dir,
|
||||
ca_cert_data=self.ca_cert_data,
|
||||
server_hostname=server_hostname,
|
||||
ssl_context=context,
|
||||
tls_in_tls=tls_in_tls,
|
||||
)
|
||||
|
||||
# If we're using all defaults and the connection
|
||||
# is TLSv1 or TLSv1.1 we throw a DeprecationWarning
|
||||
# for the host.
|
||||
if (
|
||||
default_ssl_context
|
||||
and self.ssl_version is None
|
||||
and hasattr(self.sock, "version")
|
||||
and self.sock.version() in {"TLSv1", "TLSv1.1"}
|
||||
):
|
||||
warnings.warn(
|
||||
"Negotiating TLSv1/TLSv1.1 by default is deprecated "
|
||||
"and will be disabled in urllib3 v2.0.0. Connecting to "
|
||||
"'%s' with '%s' can be enabled by explicitly opting-in "
|
||||
"with 'ssl_version'" % (self.host, self.sock.version()),
|
||||
DeprecationWarning,
|
||||
)
|
||||
|
||||
if self.assert_fingerprint:
|
||||
assert_fingerprint(
|
||||
self.sock.getpeercert(binary_form=True), self.assert_fingerprint
|
||||
@@ -412,7 +464,7 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
||||
(
|
||||
"Certificate for {0} has no `subjectAltName`, falling back to check for a "
|
||||
"`commonName` for now. This feature is being removed by major browsers and "
|
||||
"deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 "
|
||||
"deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
|
||||
"for details.)".format(hostname)
|
||||
),
|
||||
SubjectAltNameWarning,
|
||||
@@ -424,13 +476,76 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
||||
or self.assert_fingerprint is not None
|
||||
)
|
||||
|
||||
def _connect_tls_proxy(self, hostname, conn):
|
||||
"""
|
||||
Establish a TLS connection to the proxy using the provided SSL context.
|
||||
"""
|
||||
proxy_config = self.proxy_config
|
||||
ssl_context = proxy_config.ssl_context
|
||||
if ssl_context:
|
||||
# If the user provided a proxy context, we assume CA and client
|
||||
# certificates have already been set
|
||||
return ssl_wrap_socket(
|
||||
sock=conn,
|
||||
server_hostname=hostname,
|
||||
ssl_context=ssl_context,
|
||||
)
|
||||
|
||||
ssl_context = create_proxy_ssl_context(
|
||||
self.ssl_version,
|
||||
self.cert_reqs,
|
||||
self.ca_certs,
|
||||
self.ca_cert_dir,
|
||||
self.ca_cert_data,
|
||||
)
|
||||
|
||||
# If no cert was provided, use only the default options for server
|
||||
# certificate validation
|
||||
socket = ssl_wrap_socket(
|
||||
sock=conn,
|
||||
ca_certs=self.ca_certs,
|
||||
ca_cert_dir=self.ca_cert_dir,
|
||||
ca_cert_data=self.ca_cert_data,
|
||||
server_hostname=hostname,
|
||||
ssl_context=ssl_context,
|
||||
)
|
||||
|
||||
if ssl_context.verify_mode != ssl.CERT_NONE and not getattr(
|
||||
ssl_context, "check_hostname", False
|
||||
):
|
||||
# While urllib3 attempts to always turn off hostname matching from
|
||||
# the TLS library, this cannot always be done. So we check whether
|
||||
# the TLS Library still thinks it's matching hostnames.
|
||||
cert = socket.getpeercert()
|
||||
if not cert.get("subjectAltName", ()):
|
||||
warnings.warn(
|
||||
(
|
||||
"Certificate for {0} has no `subjectAltName`, falling back to check for a "
|
||||
"`commonName` for now. This feature is being removed by major browsers and "
|
||||
"deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
|
||||
"for details.)".format(hostname)
|
||||
),
|
||||
SubjectAltNameWarning,
|
||||
)
|
||||
_match_hostname(cert, hostname)
|
||||
|
||||
self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED
|
||||
return socket
|
||||
|
||||
|
||||
def _match_hostname(cert, asserted_hostname):
|
||||
# Our upstream implementation of ssl.match_hostname()
|
||||
# only applies this normalization to IP addresses so it doesn't
|
||||
# match DNS SANs so we do the same thing!
|
||||
stripped_hostname = asserted_hostname.strip("u[]")
|
||||
if is_ipaddress(stripped_hostname):
|
||||
asserted_hostname = stripped_hostname
|
||||
|
||||
try:
|
||||
match_hostname(cert, asserted_hostname)
|
||||
except CertificateError as e:
|
||||
log.warning(
|
||||
"Certificate did not match expected hostname: %s. " "Certificate: %s",
|
||||
"Certificate did not match expected hostname: %s. Certificate: %s",
|
||||
asserted_hostname,
|
||||
cert,
|
||||
)
|
||||
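The ``strip("u[]")`` above removes the brackets an IPv6 authority carries (plus a stray ``u`` left over from Python 2 reprs) so the value can be recognized by ``is_ipaddress()``; DNS names pass through unchanged because the normalization is only applied when the stripped value is an IP address. A quick check of just the string handling:

.. code-block:: python

    assert "[::1]".strip("u[]") == "::1"
    assert "example.com".strip("u[]") == "example.com"
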
@@ -440,9 +555,18 @@ def _match_hostname(cert, asserted_hostname):
        raise


if ssl:
    # Make a copy for testing.
    UnverifiedHTTPSConnection = HTTPSConnection
    HTTPSConnection = VerifiedHTTPSConnection
else:
    HTTPSConnection = DummyConnection
def _get_default_user_agent():
    return "python-urllib3/%s" % __version__


class DummyConnection(object):
    """Used to detect a failed ConnectionCls import."""

    pass


if not ssl:
    HTTPSConnection = DummyConnection  # noqa: F811


VerifiedHTTPSConnection = HTTPSConnection

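With ``_get_default_user_agent()`` in place, 1.26 stamps a ``python-urllib3/<version>`` User-Agent on requests that don't set one. A sketch against the public 1.26 API (the host is a placeholder): passing ``urllib3.util.SKIP_HEADER`` suppresses one of the few skippable headers entirely:

.. code-block:: python

    import urllib3
    from urllib3.util import SKIP_HEADER

    http = urllib3.PoolManager()
    # Only User-Agent, Accept-Encoding and Host may be skipped.
    r = http.request(
        "GET", "http://example.com/", headers={"User-Agent": SKIP_HEADER}
    )
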
@@ -1,57 +1,62 @@
from __future__ import absolute_import

import errno
import logging
import re
import socket
import sys
import warnings
from socket import error as SocketError
from socket import timeout as SocketTimeout

from socket import error as SocketError, timeout as SocketTimeout
import socket


from ._collections import HTTPHeaderDict
from .connection import (
    BaseSSLError,
    BrokenPipeError,
    DummyConnection,
    HTTPConnection,
    HTTPException,
    HTTPSConnection,
    VerifiedHTTPSConnection,
    port_by_scheme,
)
from .exceptions import (
    ClosedPoolError,
    ProtocolError,
    EmptyPoolError,
    HeaderParsingError,
    HostChangedError,
    InsecureRequestWarning,
    LocationValueError,
    MaxRetryError,
    NewConnectionError,
    ProtocolError,
    ProxyError,
    ReadTimeoutError,
    SSLError,
    TimeoutError,
    InsecureRequestWarning,
    NewConnectionError,
)
from .packages.ssl_match_hostname import CertificateError
from .packages import six
from .packages.six.moves import queue
from .connection import (
    port_by_scheme,
    DummyConnection,
    HTTPConnection,
    HTTPSConnection,
    VerifiedHTTPSConnection,
    HTTPException,
    BaseSSLError,
)
from .request import RequestMethods
from .response import HTTPResponse

from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
from .util.queue import LifoQueue
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
from .util.timeout import Timeout
from .util.url import (
    get_host,
    parse_url,
    Url,
    _normalize_host as normalize_host,
    _encode_target,
)
from .util.queue import LifoQueue
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
from .util.url import get_host, parse_url

try:  # Platform-specific: Python 3
    import weakref

    weakref_finalize = weakref.finalize
except AttributeError:  # Platform-specific: Python 2
    from .packages.backports.weakref_finalize import weakref_finalize

xrange = six.moves.xrange

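The ``weakref_finalize`` import above lets pool cleanup run without relying on ``__del__``. As the comments later in this diff note, the callback must not close over ``self``, or the finalizer itself would keep the pool alive. A minimal sketch of that pattern (the ``DemoPool`` class is illustrative only):

.. code-block:: python

    import weakref

    class DemoPool(object):
        def __init__(self):
            self.pool = ["conn-a", "conn-b"]
            # Pass the queue, not self: a callback holding self would keep
            # the pool reachable forever and the finalizer would never fire.
            weakref.finalize(self, print, "closing", self.pool)

    p = DemoPool()
    del p  # finalizer runs here, or at the latest at interpreter exit
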
@@ -65,6 +70,11 @@ class ConnectionPool(object):
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.

    .. note::
       ConnectionPool.urlopen() does not normalize or percent-encode target URIs
       which is useful if your target server doesn't support percent-encoded
       target URIs.
    """

    scheme = None
@@ -106,16 +116,16 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`httplib.HTTPConnection`.
        :class:`http.client.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`httplib.HTTPConnection`.
        into :class:`http.client.HTTPConnection`.

    :param strict:
        Causes BadStatusLine to be raised if the status line can't be parsed
        as a valid HTTP/1.0 or 1.1 status line, passed into
        :class:`httplib.HTTPConnection`.
        :class:`http.client.HTTPConnection`.

        .. note::
           Only works in Python 2. This parameter is ignored in Python 3.
@@ -149,11 +159,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.connectionpool.ProxyManager`"
        :class:`urllib3.ProxyManager`

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.connectionpool.ProxyManager`"
        instead, see :class:`urllib3.ProxyManager`

    :param \\**conn_kw:
        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
@@ -176,6 +186,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
        retries=None,
        _proxy=None,
        _proxy_headers=None,
        _proxy_config=None,
        **conn_kw
    ):
        ConnectionPool.__init__(self, host, port)
@@ -197,6 +208,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

        self.proxy = _proxy
        self.proxy_headers = _proxy_headers or {}
        self.proxy_config = _proxy_config

        # Fill the queue up so that doing get() on it will block properly
        for _ in xrange(maxsize):
@@ -213,6 +225,19 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
        # list.
        self.conn_kw.setdefault("socket_options", [])

        self.conn_kw["proxy"] = self.proxy
        self.conn_kw["proxy_config"] = self.proxy_config

        # Do not pass 'self' as callback to 'finalize'.
        # Then the 'finalize' would keep an endless living (leak) to self.
        # By just passing a reference to the pool allows the garbage collector
        # to free self if nobody else has a reference to it.
        pool = self.pool

        # Close all the HTTPConnections in the pool before the
        # HTTPConnectionPool object is garbage collected.
        weakref_finalize(self, _close_pool_connections, pool)

    def _new_conn(self):
        """
        Return a fresh :class:`HTTPConnection`.
@@ -257,7 +282,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
            if self.block:
                raise EmptyPoolError(
                    self,
                    "Pool reached maximum size and no more " "connections are allowed.",
                    "Pool reached maximum size and no more connections are allowed.",
                )
            pass  # Oh well, we'll create a new connection then

@@ -267,7 +292,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
            conn.close()
            if getattr(conn, "auto_open", 1) == 0:
                # This is a proxied connection that has been mutated by
                # httplib._tunnel() and cannot be reused (since it would
                # http.client._tunnel() and cannot be reused (since it would
                # attempt to bypass the proxy)
                conn = None

@@ -295,8 +320,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
            pass
        except queue.Full:
            # This should never happen if self.block == True
            log.warning("Connection pool is full, discarding connection: %s", self.host)

            log.warning(
                "Connection pool is full, discarding connection: %s. Connection pool size: %s",
                self.host,
                self.pool.qsize(),
            )
        # Connection never got put back into the pool, close it.
        if conn:
            conn.close()
@@ -312,7 +340,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
            pass

    def _get_timeout(self, timeout):
        """ Helper that always returns a :class:`urllib3.util.Timeout` """
        """Helper that always returns a :class:`urllib3.util.Timeout`"""
        if timeout is _Default:
            return self.timeout.clone()

@@ -369,7 +397,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
        conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)

        # Trigger any extra validation we need to do.
        try:
@@ -379,12 +407,30 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise

        # conn.request() calls httplib.*.request, not the method in
        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        if chunked:
            conn.request_chunked(method, url, **httplib_request_kw)
        else:
            conn.request(method, url, **httplib_request_kw)
        try:
            if chunked:
                conn.request_chunked(method, url, **httplib_request_kw)
            else:
                conn.request(method, url, **httplib_request_kw)

        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            # Python 3
            pass
        except IOError as e:
            # Python 2 and macOS/Linux
            # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            if e.errno not in {
                errno.EPIPE,
                errno.ESHUTDOWN,
                errno.EPROTOTYPE,
            }:
                raise

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
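``Timeout.resolve_default_timeout()`` above unwraps the ``Timeout.DEFAULT_TIMEOUT`` sentinel into the real socket-module default before it reaches the connection. Callers can pass explicit per-phase budgets instead; a sketch against the public API (the URL is a placeholder):

.. code-block:: python

    import urllib3
    from urllib3.util.timeout import Timeout

    http = urllib3.PoolManager()
    # 2.5 s to establish the connection, 7 s for each socket read.
    r = http.request(
        "GET", "http://example.com/", timeout=Timeout(connect=2.5, read=7.0)
    )
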
@@ -461,14 +507,8 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
        # Disable access to the pool
        old_pool, self.pool = self.pool, None

        try:
            while True:
                conn = old_pool.get(block=False)
                if conn:
                    conn.close()

        except queue.Empty:
            pass  # Done.
        # Close all the HTTPConnections in the pool.
        _close_pool_connections(old_pool)

    def is_same_host(self, url):
        """
@@ -527,10 +567,12 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param url:
            The URL to perform the request on.

        :param body:
            Data to send in the request body (useful for creating
            POST requests, see HTTPConnectionPool.post_url for
            more convenience).
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
@@ -560,7 +602,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When False, you can
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
@@ -597,6 +639,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """

        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme

        if headers is None:
            headers = self.headers

@@ -614,7 +660,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parse_url(url).url)
            url = six.ensure_str(parsed_url.url)

        conn = None

@@ -626,13 +672,17 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/shazow/urllib3/issues/651>
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn

        # Merge the proxy headers. Only do this in HTTP. We have to copy the
        # headers dict so we can safely change it without those changes being
        # reflected in anyone else's copy.
        if self.scheme == "http":
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )

        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)

@@ -658,7 +708,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn:
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)

            # Make the request on the httplib connection object.
@@ -693,9 +743,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
            # Everything went great!
            clean_exit = True

        except queue.Empty:
            # Timed out by queue.
            raise EmptyPoolError(self, "No pool connections are available.")
        except EmptyPoolError:
            # Didn't get a connection from the pool, no need to clean up
            clean_exit = True
            release_this_conn = False
            raise

        except (
            TimeoutError,
@@ -709,7 +761,35 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
            if isinstance(e, (BaseSSLError, CertificateError)):

            def _is_ssl_error_message_from_http_proxy(ssl_error):
                # We're trying to detect the message 'WRONG_VERSION_NUMBER' but
                # SSLErrors are kinda all over the place when it comes to the message,
                # so we try to cover our bases here!
                message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
                return (
                    "wrong version number" in message or "unknown protocol" in message
                )

            # Try to detect a common user error with proxies which is to
            # set an HTTP proxy to be HTTPS when it should be 'http://'
            # (ie {'http': 'http://proxy', 'https': 'https://proxy'})
            # Instead we add a nice error message and point to a URL.
            if (
                isinstance(e, BaseSSLError)
                and self.proxy
                and _is_ssl_error_message_from_http_proxy(e)
                and conn.proxy
                and conn.proxy.scheme == "https"
            ):
                e = ProxyError(
                    "Your proxy appears to only use HTTP and not HTTPS, "
                    "try changing your proxy URL to be HTTP. See: "
                    "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
                    "#https-proxy-error-http-proxy",
                    SSLError(e),
                )
            elif isinstance(e, (BaseSSLError, CertificateError)):
                e = SSLError(e)
            elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError("Cannot connect to proxy.", e)
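The ``ProxyError`` branch above targets one specific misconfiguration: declaring an HTTP-only proxy with an ``https://`` scheme, so the TLS handshake hits a plaintext port and fails with ``WRONG_VERSION_NUMBER``. A sketch of the wrong and right configuration (the proxy host is a placeholder):

.. code-block:: python

    import urllib3

    # Wrong for a proxy that only speaks plaintext HTTP:
    #   urllib3.ProxyManager("https://proxy.internal:3128")
    # Right:
    proxy = urllib3.ProxyManager("http://proxy.internal:3128")
    r = proxy.request("GET", "https://example.com/")
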
@@ -742,10 +822,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
            if not conn:
                # Try again
                log.warning(
                    "Retrying (%r) after connection " "broken by '%r': %s",
                    retries,
                    err,
                    url,
                    "Retrying (%r) after connection broken by '%r': %s", retries, err, url
                )
                return self.urlopen(
                    method,
@@ -758,44 +835,30 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
                    timeout=timeout,
                    pool_timeout=pool_timeout,
                    release_conn=release_conn,
                    chunked=chunked,
                    body_pos=body_pos,
                    **response_kw
                )

        def drain_and_release_conn(response):
            try:
                # discard any remaining response body, the connection will be
                # released back to the pool once the entire response is read
                response.read()
            except (
                TimeoutError,
                HTTPException,
                SocketError,
                ProtocolError,
                BaseSSLError,
                SSLError,
            ):
                pass

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                # Change the method according to RFC 9110, Section 15.4.4.
                method = "GET"
                # And lose the body not to transfer anything sensitive.
                body = None
                headers = HTTPHeaderDict(headers)._prepare_for_method_change()

            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    # Drain and release the connection for this response, since
                    # we're not returning it to be released manually.
                    drain_and_release_conn(response)
                    response.drain_conn()
                    raise
                return response

            # drain and return the connection to the pool before recursing
            drain_and_release_conn(response)

            response.drain_conn()
            retries.sleep_for_retry(response)
            log.debug("Redirecting %s -> %s", url, redirect_location)
            return self.urlopen(
@@ -809,26 +872,23 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                **response_kw
            )

        # Check if we should retry the HTTP response.
        has_retry_after = bool(response.getheader("Retry-After"))
        has_retry_after = bool(response.headers.get("Retry-After"))
        if retries.is_retry(method, response.status, has_retry_after):
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_status:
                    # Drain and release the connection for this response, since
                    # we're not returning it to be released manually.
                    drain_and_release_conn(response)
                    response.drain_conn()
                    raise
                return response

            # drain and return the connection to the pool before recursing
            drain_and_release_conn(response)

            response.drain_conn()
            retries.sleep(response)
            log.debug("Retry: %s", url)
            return self.urlopen(
@@ -842,6 +902,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                **response_kw
            )
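The switch to ``response.headers.get("Retry-After")`` above feeds ``retries.is_retry()``, which can honor a server-provided backoff. A sketch of opting into that behavior with the public ``Retry`` API (the URL is a placeholder):

.. code-block:: python

    import urllib3
    from urllib3.util.retry import Retry

    retries = Retry(
        total=3,
        status_forcelist=[429, 503],
        respect_retry_after_header=True,  # sleep as long as the server asks
    )
    http = urllib3.PoolManager()
    r = http.request("GET", "http://example.com/", retries=retries)
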
@@ -853,11 +914,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    When Python is compiled with the :mod:`ssl` module, then
    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
    instead of :class:`.HTTPSConnection`.

    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
    :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

@@ -941,15 +998,22 @@ class HTTPSConnectionPool(HTTPConnectionPool):

    def _prepare_proxy(self, conn):
        """
        Establish tunnel connection early, because otherwise httplib
        would improperly set Host: header to proxy's IP:port.
        Establishes a tunnel connection through HTTP CONNECT.

        Tunnel connection is established early because otherwise httplib would
        improperly set Host: header to proxy's IP:port.
        """

        conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)

        if self.proxy.scheme == "https":
            conn.tls_in_tls_required = True

        conn.connect()

    def _new_conn(self):
        """
        Return a fresh :class:`httplib.HTTPSConnection`.
        Return a fresh :class:`http.client.HTTPSConnection`.
        """
        self.num_connections += 1
        log.debug(
@@ -961,7 +1025,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):

        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
            raise SSLError(
                "Can't connect to HTTPS URL because the SSL " "module is not available."
                "Can't connect to HTTPS URL because the SSL module is not available."
            )

        actual_host = self.host
@@ -996,9 +1060,20 @@ class HTTPSConnectionPool(HTTPConnectionPool):
        if not conn.is_verified:
            warnings.warn(
                (
                    "Unverified HTTPS request is being made. "
                    "Unverified HTTPS request is being made to host '%s'. "
                    "Adding certificate verification is strongly advised. See: "
                    "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                    "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
                    "#ssl-warnings" % conn.host
                ),
                InsecureRequestWarning,
            )

        if getattr(conn, "proxy_is_verified", None) is False:
            warnings.warn(
                (
                    "Unverified HTTPS connection done to an HTTPS proxy. "
                    "Adding certificate verification is strongly advised. See: "
                    "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
                    "#ssl-warnings"
                ),
                InsecureRequestWarning,
@@ -1049,3 +1124,14 @@ def _normalize_host(host, scheme):
    if host.startswith("[") and host.endswith("]"):
        host = host[1:-1]
    return host


def _close_pool_connections(pool):
    """Drains a queue of connections and closes each one."""
    try:
        while True:
            conn = pool.get(block=False)
            if conn:
                conn.close()
    except queue.Empty:
        pass  # Done.

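``close()`` now delegates to ``_close_pool_connections()``, so a pool used as a context manager releases every pooled socket deterministically; a usage sketch (the host is a placeholder):

.. code-block:: python

    import urllib3

    with urllib3.HTTPConnectionPool("example.com", maxsize=2) as pool:
        r = pool.request("GET", "/")
    # on exit, close() has drained the LIFO queue and closed each connection
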
@@ -6,27 +6,31 @@ import os


def is_appengine():
    return is_local_appengine() or is_prod_appengine() or is_prod_appengine_mvms()
    return is_local_appengine() or is_prod_appengine()


def is_appengine_sandbox():
    return is_appengine() and not is_prod_appengine_mvms()
    """Reports if the app is running in the first generation sandbox.

    The second generation runtimes are technically still in a sandbox, but it
    is much less restrictive, so generally you shouldn't need to check for it.
    see https://cloud.google.com/appengine/docs/standard/runtimes
    """
    return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27"


def is_local_appengine():
    return (
        "APPENGINE_RUNTIME" in os.environ
        and "Development/" in os.environ["SERVER_SOFTWARE"]
    )
    return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
        "SERVER_SOFTWARE", ""
    ).startswith("Development/")


def is_prod_appengine():
    return (
        "APPENGINE_RUNTIME" in os.environ
        and "Google App Engine/" in os.environ["SERVER_SOFTWARE"]
        and not is_prod_appengine_mvms()
    )
    return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
        "SERVER_SOFTWARE", ""
    ).startswith("Google App Engine/")


def is_prod_appengine_mvms():
    return os.environ.get("GAE_VM", False) == "true"
    """Deprecated."""
    return False

@@ -32,30 +32,26 @@ license and by oscrypto's:
from __future__ import absolute_import

import platform
from ctypes.util import find_library
from ctypes import (
    c_void_p,
    c_int32,
    c_char_p,
    c_size_t,
    CDLL,
    CFUNCTYPE,
    POINTER,
    c_bool,
    c_byte,
    c_char_p,
    c_int32,
    c_long,
    c_size_t,
    c_uint32,
    c_ulong,
    c_long,
    c_bool,
    c_void_p,
)
from ctypes import CDLL, POINTER, CFUNCTYPE
from ctypes.util import find_library

from ...packages.six import raise_from

security_path = find_library("Security")
if not security_path:
    raise ImportError("The library Security could not be found")


core_foundation_path = find_library("CoreFoundation")
if not core_foundation_path:
    raise ImportError("The library CoreFoundation could not be found")
if platform.system() != "Darwin":
    raise ImportError("Only macOS is supported")

version = platform.mac_ver()[0]
version_info = tuple(map(int, version.split(".")))
@@ -65,8 +61,31 @@ if version_info < (10, 8):
        % (version_info[0], version_info[1])
    )

Security = CDLL(security_path, use_errno=True)
CoreFoundation = CDLL(core_foundation_path, use_errno=True)

def load_cdll(name, macos10_16_path):
    """Loads a CDLL by name, falling back to known path on 10.16+"""
    try:
        # Big Sur is technically 11 but we use 10.16 due to the Big Sur
        # beta being labeled as 10.16.
        if version_info >= (10, 16):
            path = macos10_16_path
        else:
            path = find_library(name)
        if not path:
            raise OSError  # Caught and reraised as 'ImportError'
        return CDLL(path, use_errno=True)
    except OSError:
        raise_from(ImportError("The library %s failed to load" % name), None)


Security = load_cdll(
    "Security", "/System/Library/Frameworks/Security.framework/Security"
)
CoreFoundation = load_cdll(
    "CoreFoundation",
    "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
)


Boolean = c_bool
CFIndex = c_long
@@ -276,6 +295,13 @@ try:
    Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
    Security.SSLSetProtocolVersionMax.restype = OSStatus

    try:
        Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
        Security.SSLSetALPNProtocols.restype = OSStatus
    except AttributeError:
        # Supported only in 10.12+
        pass

    Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
    Security.SecCopyErrorMessageString.restype = CFStringRef

@@ -415,6 +441,7 @@ class SecurityConst(object):
    kTLSProtocol1 = 4
    kTLSProtocol11 = 7
    kTLSProtocol12 = 8
    # SecureTransport does not support TLS 1.3 even if there's a constant for it
    kTLSProtocol13 = 10
    kTLSProtocolMaxSupported = 999


@@ -10,13 +10,13 @@ appropriate and useful assistance to the higher-level code.
import base64
import ctypes
import itertools
import re
import os
import re
import ssl
import struct
import tempfile

from .bindings import Security, CoreFoundation, CFConst

from .bindings import CFConst, CoreFoundation, Security

# This regular expression is used to grab PEM data out of a PEM bundle.
_PEM_CERTS_RE = re.compile(
@@ -56,6 +56,51 @@ def _cf_dictionary_from_tuples(tuples):
    )


def _cfstr(py_bstr):
    """
    Given a Python binary data, create a CFString.
    The string must be CFReleased by the caller.
    """
    c_str = ctypes.c_char_p(py_bstr)
    cf_str = CoreFoundation.CFStringCreateWithCString(
        CoreFoundation.kCFAllocatorDefault,
        c_str,
        CFConst.kCFStringEncodingUTF8,
    )
    return cf_str


def _create_cfstring_array(lst):
    """
    Given a list of Python binary data, create an associated CFMutableArray.
    The array must be CFReleased by the caller.

    Raises an ssl.SSLError on failure.
    """
    cf_arr = None
    try:
        cf_arr = CoreFoundation.CFArrayCreateMutable(
            CoreFoundation.kCFAllocatorDefault,
            0,
            ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
        )
        if not cf_arr:
            raise MemoryError("Unable to allocate memory!")
        for item in lst:
            cf_str = _cfstr(item)
            if not cf_str:
                raise MemoryError("Unable to allocate memory!")
            try:
                CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
            finally:
                CoreFoundation.CFRelease(cf_str)
    except BaseException as e:
        if cf_arr:
            CoreFoundation.CFRelease(cf_arr)
        raise ssl.SSLError("Unable to allocate array: %s" % (e,))
    return cf_arr


def _cf_string_to_unicode(value):
    """
    Creates a Unicode string from a CFString object. Used entirely for error
@@ -143,6 +188,7 @@ def _cert_array_from_pem(pem_bundle):
        # We only want to do that if an error occurs: otherwise, the caller
        # should free.
        CoreFoundation.CFRelease(cert_array)
        raise

    return cert_array

@@ -326,3 +372,26 @@ def _load_client_cert_chain(keychain, *paths):
    finally:
        for obj in itertools.chain(identities, certificates):
            CoreFoundation.CFRelease(obj)


TLS_PROTOCOL_VERSIONS = {
    "SSLv2": (0, 2),
    "SSLv3": (3, 0),
    "TLSv1": (3, 1),
    "TLSv1.1": (3, 2),
    "TLSv1.2": (3, 3),
}


def _build_tls_unknown_ca_alert(version):
    """
    Builds a TLS alert record for an unknown CA.
    """
    ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
    severity_fatal = 0x02
    description_unknown_ca = 0x30
    msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
    msg_len = len(msg)
    record_type_alert = 0x15
    record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
    return record

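For a TLSv1.2 connection, ``_build_tls_unknown_ca_alert()`` packs a 5-byte record header (type 0x15 = alert, version 3.3, length 2) followed by the 2-byte fatal/unknown_ca payload; the arithmetic can be checked directly:

.. code-block:: python

    import struct

    ver_maj, ver_min = (3, 3)  # TLS_PROTOCOL_VERSIONS["TLSv1.2"]
    msg = struct.pack(">BB", 0x02, 0x30)  # fatal severity, unknown_ca
    record = struct.pack(">BBBH", 0x15, ver_maj, ver_min, len(msg)) + msg
    assert record == b"\x15\x03\x03\x00\x02\x02\x30"
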
@@ -39,24 +39,24 @@ urllib3 on Google App Engine:
"""

from __future__ import absolute_import

import io
import logging
import warnings
from ..packages.six.moves.urllib.parse import urljoin

from ..exceptions import (
    HTTPError,
    HTTPWarning,
    MaxRetryError,
    ProtocolError,
    TimeoutError,
    SSLError,
    TimeoutError,
)

from ..packages.six.moves.urllib.parse import urljoin
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.timeout import Timeout
from ..util.retry import Retry
from ..util.timeout import Timeout
from . import _appengine_environ

try:
@@ -90,7 +90,7 @@ class AppEngineManager(RequestMethods):
    * If you attempt to use this on App Engine Flexible, as full socket
      support is available.
    * If a request size is more than 10 megabytes.
    * If a response size is more than 32 megabtyes.
    * If a response size is more than 32 megabytes.
    * If you use an unsupported request method such as OPTIONS.

    Beyond those cases, it will raise normal urllib3 errors.
@@ -108,17 +108,10 @@ class AppEngineManager(RequestMethods):
                "URLFetch is not available in this environment."
            )

        if is_prod_appengine_mvms():
            raise AppEnginePlatformError(
                "Use normal urllib3.PoolManager instead of AppEngineManager"
                "on Managed VMs, as using URLFetch is not necessary in "
                "this environment."
            )

        warnings.warn(
            "urllib3 is using URLFetch on Google App Engine sandbox instead "
            "of sockets. To use sockets directly instead of URLFetch see "
            "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.",
            "https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.",
            AppEnginePlatformWarning,
        )

@@ -231,7 +224,7 @@ class AppEngineManager(RequestMethods):
        )

        # Check if we should retry the HTTP response.
        has_retry_after = bool(http_response.getheader("Retry-After"))
        has_retry_after = bool(http_response.headers.get("Retry-After"))
        if retries.is_retry(method, http_response.status, has_retry_after):
            retries = retries.increment(method, url, response=http_response, _pool=self)
            log.debug("Retry: %s", url)

@@ -5,12 +5,21 @@ Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
from __future__ import absolute_import

import warnings
from logging import getLogger

from ntlm import ntlm

from .. import HTTPSConnectionPool
from ..packages.six.moves.http_client import HTTPSConnection

warnings.warn(
    "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed "
    "in urllib3 v2.0 release, urllib3 is not able to support it properly due "
    "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. "
    "If you are a user of this module please comment in the mentioned issue.",
    DeprecationWarning,
)

log = getLogger(__name__)

@@ -60,7 +69,7 @@ class NTLMConnectionPool(HTTPSConnectionPool):
        log.debug("Request headers: %s", headers)
        conn.request("GET", self.authurl, None, headers)
        res = conn.getresponse()
        reshdr = dict(res.getheaders())
        reshdr = dict(res.headers)
        log.debug("Response status: %s %s", res.status, res.reason)
        log.debug("Response headers: %s", reshdr)
        log.debug("Response data: %s [...]", res.read(100))
@@ -92,13 +101,11 @@ class NTLMConnectionPool(HTTPSConnectionPool):
        conn.request("GET", self.authurl, None, headers)
        res = conn.getresponse()
        log.debug("Response status: %s %s", res.status, res.reason)
        log.debug("Response headers: %s", dict(res.getheaders()))
        log.debug("Response headers: %s", dict(res.headers))
        log.debug("Response data: %s [...]", res.read()[:100])
        if res.status != 200:
            if res.status == 401:
                raise Exception(
                    "Server rejected request: wrong " "username or password"
                )
                raise Exception("Server rejected request: wrong username or password")
            raise Exception("Wrong server response: %s %s" % (res.status, res.reason))

        res.fp = None

@@ -1,27 +1,31 @@
"""
SSL with SNI_-support for Python 2. Follow these instructions if you would
like to verify SSL certificates in Python 2. Note, the default libraries do
TLS with SNI_-support for Python 2. Follow these instructions if you would
like to verify TLS certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.

This needs the following packages installed:

* pyOpenSSL (tested with 16.0.0)
* cryptography (minimum 1.3.4, from pyopenssl)
* idna (minimum 2.0, from cryptography)
* `pyOpenSSL`_ (tested with 16.0.0)
* `cryptography`_ (minimum 1.3.4, from pyopenssl)
* `idna`_ (minimum 2.0, from cryptography)

However, pyopenssl depends on cryptography, which depends on idna, so while we
use all three directly here we end up having relatively few packages required.

You can install them with the following command:

    pip install pyopenssl cryptography idna
.. code-block:: bash

    $ python -m pip install pyopenssl cryptography idna

To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this::
like this:

.. code-block:: python

    try:
        import urllib3.contrib.pyopenssl
@@ -35,18 +39,18 @@ when the required modules are installed.
Activating this module also has the positive side effect of disabling SSL/TLS
compression in Python 2 (see `CRIME attack`_).

If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.

.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
.. _pyopenssl: https://www.pyopenssl.org
.. _cryptography: https://cryptography.io
.. _idna: https://github.com/kjd/idna
"""
from __future__ import absolute_import

import OpenSSL.crypto
import OpenSSL.SSL
from cryptography import x509
from cryptography.hazmat.backends.openssl import backend as openssl_backend
from cryptography.hazmat.backends.openssl.x509 import _Certificate

try:
    from cryptography.x509 import UnsupportedExtension
@@ -56,8 +60,9 @@ except ImportError:
        pass


from socket import timeout, error as SocketError
from io import BytesIO
from socket import error as SocketError
from socket import timeout

try:  # Platform-specific: Python 2
    from socket import _fileobject
@@ -67,11 +72,20 @@ except ImportError:  # Platform-specific: Python 3

import logging
import ssl
from ..packages import six
import sys
import warnings

from .. import util
from ..packages import six
from ..util.ssl_ import PROTOCOL_TLS_CLIENT

warnings.warn(
    "'urllib3.contrib.pyopenssl' module is deprecated and will be removed "
    "in a future release of urllib3 2.x. Read more in this issue: "
    "https://github.com/urllib3/urllib3/issues/2680",
    category=DeprecationWarning,
    stacklevel=2,
)

__all__ = ["inject_into_urllib3", "extract_from_urllib3"]

@@ -81,6 +95,7 @@ HAS_SNI = True
# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
    util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
    PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,
    ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}

@@ -213,9 +228,8 @@ def get_subj_alt_name(peer_cert):
    if hasattr(peer_cert, "to_cryptography"):
        cert = peer_cert.to_cryptography()
    else:
        # This is technically using private APIs, but should work across all
        # relevant versions before PyOpenSSL got a proper API for this.
        cert = _Certificate(openssl_backend, peer_cert._x509)
    der = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, peer_cert)
    cert = x509.load_der_x509_certificate(der, openssl_backend)

    # We want to find the SAN extension. Ask Cryptography to locate it (it's
    # faster than looping in Python)
@@ -400,7 +414,6 @@ if _fileobject:  # Platform-specific: Python 2
        self._makefile_refs += 1
        return _fileobject(self, mode, bufsize, close=True)


else:  # Platform-specific: Python 3
    makefile = backport_makefile

@@ -450,9 +463,12 @@ class PyOpenSSLContext(object):
            cafile = cafile.encode("utf-8")
        if capath is not None:
            capath = capath.encode("utf-8")
        self._ctx.load_verify_locations(cafile, capath)
        if cadata is not None:
            self._ctx.load_verify_locations(BytesIO(cadata))
        try:
            self._ctx.load_verify_locations(cafile, capath)
            if cadata is not None:
                self._ctx.load_verify_locations(BytesIO(cadata))
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError("unable to load trusted certificates: %r" % e)

    def load_cert_chain(self, certfile, keyfile=None, password=None):
        self._ctx.use_certificate_chain_file(certfile)
@@ -462,6 +478,10 @@ class PyOpenSSLContext(object):
            self._ctx.set_passwd_cb(lambda *_: password)
        self._ctx.use_privatekey_file(keyfile or certfile)

    def set_alpn_protocols(self, protocols):
        protocols = [six.ensure_binary(p) for p in protocols]
        return self._ctx.set_alpn_protos(protocols)

    def wrap_socket(
        self,
        sock,

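The new ``set_alpn_protocols()`` only takes effect once this module is wired into urllib3, which is done exactly as the module docstring earlier in this diff describes (the URL is a placeholder):

.. code-block:: python

    try:
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()
    except ImportError:
        pass  # fall back to the standard-library ssl module

    import urllib3

    r = urllib3.PoolManager().request("GET", "https://example.com/")
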
@@ -29,6 +29,8 @@ library. An enormous debt is owed to him for blazing this trail for us. For
|
||||
that reason, this code should be considered to be covered both by urllib3's
|
||||
license and by oscrypto's:
|
||||
|
||||
.. code-block::
|
||||
|
||||
Copyright (c) 2015-2016 Will Bond <will@wbond.net>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
@@ -58,16 +60,21 @@ import os.path
|
||||
import shutil
|
||||
import socket
|
||||
import ssl
|
||||
import struct
|
||||
import threading
|
||||
import weakref
|
||||
|
||||
from .. import util
|
||||
from ._securetransport.bindings import Security, SecurityConst, CoreFoundation
|
||||
from ..packages import six
|
||||
from ..util.ssl_ import PROTOCOL_TLS_CLIENT
|
||||
from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
|
||||
from ._securetransport.low_level import (
|
||||
_assert_no_error,
|
||||
_build_tls_unknown_ca_alert,
|
||||
_cert_array_from_pem,
|
||||
_temporary_keychain,
|
||||
_create_cfstring_array,
|
||||
_load_client_cert_chain,
|
||||
_temporary_keychain,
|
||||
)
|
||||
|
||||
try: # Platform-specific: Python 2
|
||||
@@ -144,13 +151,11 @@ CIPHER_SUITES = [
|
||||
]
|
||||
|
||||
# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
|
||||
# TLSv1 and a high of TLSv1.3. For everything else, we pin to that version.
|
||||
# TLSv1 to 1.2 are supported on macOS 10.8+ and TLSv1.3 is macOS 10.13+
|
||||
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
|
||||
# TLSv1 to 1.2 are supported on macOS 10.8+
|
||||
_protocol_to_min_max = {
|
||||
util.PROTOCOL_TLS: (
|
||||
SecurityConst.kTLSProtocol1,
|
||||
SecurityConst.kTLSProtocolMaxSupported,
|
||||
)
|
||||
util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
|
||||
PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
|
||||
}
|
||||
|
||||
if hasattr(ssl, "PROTOCOL_SSLv2"):
|
||||
@@ -377,16 +382,55 @@ class WrappedSocket(object):
|
||||
)
|
||||
_assert_no_error(result)
|
||||
|
||||
def _set_alpn_protocols(self, protocols):
|
||||
"""
|
||||
Sets up the ALPN protocols on the context.
|
||||
"""
|
||||
if not protocols:
|
||||
return
|
||||
protocols_arr = _create_cfstring_array(protocols)
|
||||
try:
|
||||
result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
|
||||
_assert_no_error(result)
|
||||
finally:
|
||||
CoreFoundation.CFRelease(protocols_arr)
|
||||
|
||||
def _custom_validate(self, verify, trust_bundle):
|
||||
"""
|
||||
Called when we have set custom validation. We do this in two cases:
|
||||
first, when cert validation is entirely disabled; and second, when
|
||||
using a custom trust DB.
|
||||
Raises an SSLError if the connection is not trusted.
|
||||
"""
|
||||
# If we disabled cert validation, just say: cool.
|
||||
if not verify:
|
||||
return
|
||||
|
||||
successes = (
|
||||
SecurityConst.kSecTrustResultUnspecified,
|
||||
SecurityConst.kSecTrustResultProceed,
|
||||
)
|
||||
try:
|
||||
trust_result = self._evaluate_trust(trust_bundle)
|
||||
if trust_result in successes:
|
||||
return
|
||||
reason = "error code: %d" % (trust_result,)
|
||||
except Exception as e:
|
||||
# Do not trust on error
|
||||
reason = "exception: %r" % (e,)
|
||||
|
||||
# SecureTransport does not send an alert nor shuts down the connection.
|
||||
rec = _build_tls_unknown_ca_alert(self.version())
|
||||
self.socket.sendall(rec)
|
||||
# close the connection immediately
|
||||
# l_onoff = 1, activate linger
|
||||
# l_linger = 0, linger for 0 seoncds
|
||||
opts = struct.pack("ii", 1, 0)
|
||||
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
|
||||
self.close()
|
||||
raise ssl.SSLError("certificate verify failed, %s" % reason)
|
||||
|
||||
def _evaluate_trust(self, trust_bundle):
|
||||
# We want data in memory, so load it up.
|
||||
if os.path.isfile(trust_bundle):
|
||||
with open(trust_bundle, "rb") as f:
|
||||
@@ -424,15 +468,7 @@ class WrappedSocket(object):
            if cert_array is not None:
                CoreFoundation.CFRelease(cert_array)

        # Ok, now we can look at what the result was.
        successes = (
            SecurityConst.kSecTrustResultUnspecified,
            SecurityConst.kSecTrustResultProceed,
        )
        if trust_result.value not in successes:
            raise ssl.SSLError(
                "certificate verify failed, error code: %d" % trust_result.value
            )
        return trust_result.value

    def handshake(
        self,
@@ -444,6 +480,7 @@ class WrappedSocket(object):
        client_cert,
        client_key,
        client_key_passphrase,
        alpn_protocols,
    ):
        """
        Actually performs the TLS handshake. This is run automatically by
@@ -484,19 +521,14 @@ class WrappedSocket(object):
        # Setup the ciphers.
        self._set_ciphers()

        # Setup the ALPN protocols.
        self._set_alpn_protocols(alpn_protocols)

        # Set the minimum and maximum TLS versions.
        result = Security.SSLSetProtocolVersionMin(self.context, min_version)
        _assert_no_error(result)

        # TLS 1.3 isn't necessarily enabled by the OS
        # so we have to detect when we error out and try
        # setting TLS 1.3 if it's allowed. kTLSProtocolMaxSupported
        # was added in macOS 10.13 along with kTLSProtocol13.
        result = Security.SSLSetProtocolVersionMax(self.context, max_version)
        if result != 0 and max_version == SecurityConst.kTLSProtocolMaxSupported:
            result = Security.SSLSetProtocolVersionMax(
                self.context, SecurityConst.kTLSProtocol12
            )
        _assert_no_error(result)

        # If there's a trust DB, we need to use it. We do that by telling
@@ -707,7 +739,7 @@ class WrappedSocket(object):
        )
        _assert_no_error(result)
        if protocol.value == SecurityConst.kTLSProtocol13:
            return "TLSv1.3"
            raise ssl.SSLError("SecureTransport does not support TLS 1.3")
        elif protocol.value == SecurityConst.kTLSProtocol12:
            return "TLSv1.2"
        elif protocol.value == SecurityConst.kTLSProtocol11:
@@ -737,7 +769,6 @@ if _fileobject:  # Platform-specific: Python 2
            self._makefile_refs += 1
            return _fileobject(self, mode, bufsize, close=True)


else:  # Platform-specific: Python 3

    def makefile(self, mode="r", buffering=None, *args, **kwargs):
@@ -765,6 +796,7 @@ class SecureTransportContext(object):
        self._client_cert = None
        self._client_key = None
        self._client_key_passphrase = None
        self._alpn_protocols = None

    @property
    def check_hostname(self):
@@ -830,6 +862,11 @@ class SecureTransportContext(object):
        if capath is not None:
            raise ValueError("SecureTransport does not support cert directories")

        # Raise if cafile does not exist.
        if cafile is not None:
            with open(cafile):
                pass

        self._trust_bundle = cafile or cadata

    def load_cert_chain(self, certfile, keyfile=None, password=None):
@@ -837,6 +874,18 @@ class SecureTransportContext(object):
        self._client_key = keyfile
        self._client_cert_passphrase = password

    def set_alpn_protocols(self, protocols):
        """
        Sets the ALPN protocols that will later be set on the context.

        Raises a NotImplementedError if ALPN is not supported.
        """
        if not hasattr(Security, "SSLSetALPNProtocols"):
            raise NotImplementedError(
                "SecureTransport supports ALPN only in macOS 10.12+"
            )
        self._alpn_protocols = [six.ensure_binary(p) for p in protocols]

    def wrap_socket(
        self,
        sock,
@@ -866,5 +915,6 @@ class SecureTransportContext(object):
            self._client_cert,
            self._client_key,
            self._client_key_passphrase,
            self._alpn_protocols,
        )
        return wrapped_socket

@@ -14,22 +14,26 @@ supports the following SOCKS features:
- SOCKS5 with local DNS (``proxy_url='socks5://...``)
- Usernames and passwords for the SOCKS proxy

.. note::
   It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
   your ``proxy_url`` to ensure that DNS resolution is done from the remote
   server instead of client-side when connecting to a domain name.

SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
supports IPv4, IPv6, and domain names.

When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
will be sent as the ``userid`` section of the SOCKS request::
will be sent as the ``userid`` section of the SOCKS request:

.. code-block:: python

   proxy_url="socks4a://<userid>@proxy-host"

When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
of the ``proxy_url`` will be sent as the username/password to authenticate
with the proxy::
with the proxy:

.. code-block:: python

   proxy_url="socks5h://<username>:<password>@proxy-host"

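A minimal usage sketch for the module above (assuming the optional PySocks
dependency is installed; the proxy host and target URL are hypothetical):

    from urllib3.contrib.socks import SOCKSProxyManager

    # socks5h:// resolves hostnames on the proxy, per the note above.
    proxy = SOCKSProxyManager("socks5h://<username>:<password>@proxy-host:1080/")
    response = proxy.request("GET", "http://example.com/")
    print(response.status)
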
@@ -40,19 +44,21 @@ try:
    import socks
except ImportError:
    import warnings

    from ..exceptions import DependencyWarning

    warnings.warn(
        (
            "SOCKS support in urllib3 requires the installation of optional "
            "dependencies: specifically, PySocks. For more information, see "
            "https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies"
            "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
        ),
        DependencyWarning,
    )
    raise

from socket import error as SocketError, timeout as SocketTimeout
from socket import error as SocketError
from socket import timeout as SocketTimeout

from ..connection import HTTPConnection, HTTPSConnection
from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool

@@ -1,21 +1,24 @@
from __future__ import absolute_import

from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead

# Base Exceptions


class HTTPError(Exception):
    "Base exception used by this module."
    """Base exception used by this module."""

    pass


class HTTPWarning(Warning):
    "Base warning used by this module."
    """Base warning used by this module."""

    pass


class PoolError(HTTPError):
    "Base exception for errors caused within a pool."
    """Base exception for errors caused within a pool."""

    def __init__(self, pool, message):
        self.pool = pool
@@ -27,7 +30,7 @@ class PoolError(HTTPError):


class RequestError(PoolError):
    "Base exception for PoolErrors that have associated URLs."
    """Base exception for PoolErrors that have associated URLs."""

    def __init__(self, pool, url, message):
        self.url = url
@@ -39,22 +42,28 @@ class RequestError(PoolError):


class SSLError(HTTPError):
    "Raised when SSL certificate fails in an HTTPS connection."
    """Raised when SSL certificate fails in an HTTPS connection."""

    pass


class ProxyError(HTTPError):
    "Raised when the connection to a proxy fails."
    pass
    """Raised when the connection to a proxy fails."""

    def __init__(self, message, error, *args):
        super(ProxyError, self).__init__(message, error, *args)
        self.original_error = error


class DecodeError(HTTPError):
    "Raised when automatic decoding based on Content-Type fails."
    """Raised when automatic decoding based on Content-Type fails."""

    pass


class ProtocolError(HTTPError):
    "Raised when something unexpected happens mid-request/response."
    """Raised when something unexpected happens mid-request/response."""

    pass


@@ -84,7 +93,7 @@ class MaxRetryError(RequestError):


class HostChangedError(RequestError):
    "Raised when an existing pool gets a request for a foreign host."
    """Raised when an existing pool gets a request for a foreign host."""

    def __init__(self, pool, url, retries=3):
        message = "Tried to open a foreign host with url: %s" % url
@@ -93,13 +102,13 @@ class HostChangedError(RequestError):


class TimeoutStateError(HTTPError):
    """ Raised when passing an invalid state to a timeout """
    """Raised when passing an invalid state to a timeout"""

    pass


class TimeoutError(HTTPError):
    """ Raised when a socket timeout error occurs.
    """Raised when a socket timeout error occurs.

    Catching this error will catch both :exc:`ReadTimeoutErrors
    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
@@ -109,39 +118,45 @@ class TimeoutError(HTTPError):


class ReadTimeoutError(TimeoutError, RequestError):
    "Raised when a socket timeout occurs while receiving data from a server"
    """Raised when a socket timeout occurs while receiving data from a server"""

    pass


# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
    "Raised when a socket timeout occurs while connecting to a server"
    """Raised when a socket timeout occurs while connecting to a server"""

    pass


class NewConnectionError(ConnectTimeoutError, PoolError):
    "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
    """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""

    pass


class EmptyPoolError(PoolError):
    "Raised when a pool runs out of connections and no more are allowed."
    """Raised when a pool runs out of connections and no more are allowed."""

    pass


class ClosedPoolError(PoolError):
    "Raised when a request enters a pool after the pool has been closed."
    """Raised when a request enters a pool after the pool has been closed."""

    pass


class LocationValueError(ValueError, HTTPError):
    "Raised when there is something wrong with a given URL input."
    """Raised when there is something wrong with a given URL input."""

    pass


class LocationParseError(LocationValueError):
    "Raised when get_host or similar fails to parse the URL input."
    """Raised when get_host or similar fails to parse the URL input."""

    def __init__(self, location):
        message = "Failed to parse: %s" % location
@@ -150,39 +165,56 @@ class LocationParseError(LocationValueError):
        self.location = location


class URLSchemeUnknown(LocationValueError):
    """Raised when a URL input has an unsupported scheme."""

    def __init__(self, scheme):
        message = "Not supported URL scheme %s" % scheme
        super(URLSchemeUnknown, self).__init__(message)

        self.scheme = scheme


class ResponseError(HTTPError):
    "Used as a container for an error reason supplied in a MaxRetryError."
    """Used as a container for an error reason supplied in a MaxRetryError."""

    GENERIC_ERROR = "too many error responses"
    SPECIFIC_ERROR = "too many {status_code} error responses"


class SecurityWarning(HTTPWarning):
    "Warned when performing security reducing actions"
    """Warned when performing security reducing actions"""

    pass


class SubjectAltNameWarning(SecurityWarning):
    "Warned when connecting to a host with a certificate missing a SAN."
    """Warned when connecting to a host with a certificate missing a SAN."""

    pass


class InsecureRequestWarning(SecurityWarning):
    "Warned when making an unverified HTTPS request."
    """Warned when making an unverified HTTPS request."""

    pass


class SystemTimeWarning(SecurityWarning):
    "Warned when system time is suspected to be wrong"
    """Warned when system time is suspected to be wrong"""

    pass


class InsecurePlatformWarning(SecurityWarning):
    "Warned when certain SSL configuration is not available on a platform."
    """Warned when certain TLS/SSL configuration is not available on a platform."""

    pass


class SNIMissingWarning(HTTPWarning):
    "Warned when making a HTTPS request without SNI available."
    """Warned when making a HTTPS request without SNI available."""

    pass


@@ -196,14 +228,15 @@ class DependencyWarning(HTTPWarning):


class ResponseNotChunked(ProtocolError, ValueError):
    "Response needs to be chunked in order to read it as chunks."
    """Response needs to be chunked in order to read it as chunks."""

    pass


class BodyNotHttplibCompatible(HTTPError):
    """
    Body should be httplib.HTTPResponse like (have an fp attribute which
    returns raw chunks) for read_chunked().
    Body should be :class:`http.client.HTTPResponse` like
    (have an fp attribute which returns raw chunks) for read_chunked().
    """

    pass
@@ -213,37 +246,71 @@ class IncompleteRead(HTTPError, httplib_IncompleteRead):
    """
    Response length doesn't match expected Content-Length

    Subclass of http_client.IncompleteRead to allow int value
    for `partial` to avoid creating large objects on streamed
    reads.
    Subclass of :class:`http.client.IncompleteRead` to allow int value
    for ``partial`` to avoid creating large objects on streamed reads.
    """

    def __init__(self, partial, expected):
        super(IncompleteRead, self).__init__(partial, expected)

    def __repr__(self):
        return "IncompleteRead(%i bytes read, " "%i more expected)" % (
        return "IncompleteRead(%i bytes read, %i more expected)" % (
            self.partial,
            self.expected,
        )


class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
    """Invalid chunk length in a chunked response."""

    def __init__(self, response, length):
        super(InvalidChunkLength, self).__init__(
            response.tell(), response.length_remaining
        )
        self.response = response
        self.length = length

    def __repr__(self):
        return "InvalidChunkLength(got length %r, %i bytes read)" % (
            self.length,
            self.partial,
        )


class InvalidHeader(HTTPError):
    "The header provided was somehow invalid."
    """The header provided was somehow invalid."""

    pass


class ProxySchemeUnknown(AssertionError, ValueError):
    "ProxyManager does not support the supplied scheme"
class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
    """ProxyManager does not support the supplied scheme"""

    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.

    def __init__(self, scheme):
        message = "Not supported proxy scheme %s" % scheme
        # 'localhost' is here because our URL parser parses
        # localhost:8080 -> scheme=localhost, remove if we fix this.
        if scheme == "localhost":
            scheme = None
        if scheme is None:
            message = "Proxy URL had no scheme, should start with http:// or https://"
        else:
            message = (
                "Proxy URL had unsupported scheme %s, should use http:// or https://"
                % scheme
            )
        super(ProxySchemeUnknown, self).__init__(message)


class ProxySchemeUnsupported(ValueError):
    """Fetching HTTPS resources through HTTPS proxies is unsupported"""

    pass


class HeaderParsingError(HTTPError):
    "Raised by assert_header_parsing, but we convert it to a log.warning statement."
    """Raised by assert_header_parsing, but we convert it to a log.warning statement."""

    def __init__(self, defects, unparsed_data):
        message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
@@ -251,5 +318,6 @@ class HeaderParsingError(HTTPError):


class UnrewindableBodyError(HTTPError):
    "urllib3 encountered an error when trying to rewind a body"
    """urllib3 encountered an error when trying to rewind a body"""

    pass

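An illustrative sketch of how the reworked ``ProxyError`` above surfaces the
underlying failure (the proxy address and target URL are hypothetical; the
request is expected to fail):

    import urllib3
    from urllib3.exceptions import MaxRetryError

    http = urllib3.ProxyManager("http://localhost:3128/")
    try:
        http.request("GET", "http://example.com/", retries=0)
    except MaxRetryError as e:
        # e.reason is typically a ProxyError whose original_error holds the
        # lower-level exception (e.g. a NewConnectionError).
        print(type(e.reason), getattr(e.reason, "original_error", None))
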
@@ -1,4 +1,5 @@
from __future__ import absolute_import

import email.utils
import mimetypes
import re
@@ -26,7 +27,8 @@ def format_header_param_rfc2231(name, value):
    strategy defined in RFC 2231.

    Particularly useful for header parameters which might contain
    non-ASCII values, like file names. This follows RFC 2388 Section 4.4.
    non-ASCII values, like file names. This follows
    `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.

    :param name:
        The name of the parameter, a string expected to be ASCII only.
@@ -65,7 +67,6 @@ _HTML5_REPLACEMENTS = {
    u"\u0022": u"%22",
    # Replace "\" with "\\".
    u"\u005C": u"\u005C\u005C",
    u"\u005C": u"\u005C\u005C",
}

# All control characters from 0x00 to 0x1F *except* 0x1B.

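A short sketch of the RFC 2231 strategy documented above, using the function
this module defines (the file name is an arbitrary non-ASCII example):

    from urllib3.fields import format_header_param_rfc2231

    print(format_header_param_rfc2231("filename", u"r\u00e9sum\u00e9.txt"))
    # -> filename*=utf-8''r%C3%A9sum%C3%A9.txt
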
@@ -1,13 +1,13 @@
from __future__ import absolute_import

import binascii
import codecs
import os

from io import BytesIO

from .fields import RequestField
from .packages import six
from .packages.six import b
from .fields import RequestField

writer = codecs.lookup("utf-8")[3]


@@ -1,5 +0,0 @@
from __future__ import absolute_import

from . import ssl_match_hostname

__all__ = ("ssl_match_hostname",)

@@ -7,7 +7,6 @@ Backports the Python 3 ``socket.makefile`` method for use with anything that
wants to create a "fake" socket object.
"""
import io

from socket import SocketIO


lib/urllib3/packages/backports/weakref_finalize.py (new file, 155 lines)
@@ -0,0 +1,155 @@
# -*- coding: utf-8 -*-
"""
backports.weakref_finalize
~~~~~~~~~~~~~~~~~~

Backports the Python 3 ``weakref.finalize`` method.
"""
from __future__ import absolute_import

import itertools
import sys
from weakref import ref

__all__ = ["weakref_finalize"]


class weakref_finalize(object):
    """Class for finalization of weakrefable objects
    finalize(obj, func, *args, **kwargs) returns a callable finalizer
    object which will be called when obj is garbage collected. The
    first time the finalizer is called it evaluates func(*args, **kwargs)
    and returns the result. After this the finalizer is dead, and
    calling it just returns None.
    When the program exits any remaining finalizers for which the
    atexit attribute is true will be run in reverse order of creation.
    By default atexit is true.
    """

    # Finalizer objects don't have any state of their own. They are
    # just used as keys to lookup _Info objects in the registry. This
    # ensures that they cannot be part of a ref-cycle.

    __slots__ = ()
    _registry = {}
    _shutdown = False
    _index_iter = itertools.count()
    _dirty = False
    _registered_with_atexit = False

    class _Info(object):
        __slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index")

    def __init__(self, obj, func, *args, **kwargs):
        if not self._registered_with_atexit:
            # We may register the exit function more than once because
            # of a thread race, but that is harmless
            import atexit

            atexit.register(self._exitfunc)
            weakref_finalize._registered_with_atexit = True
        info = self._Info()
        info.weakref = ref(obj, self)
        info.func = func
        info.args = args
        info.kwargs = kwargs or None
        info.atexit = True
        info.index = next(self._index_iter)
        self._registry[self] = info
        weakref_finalize._dirty = True

    def __call__(self, _=None):
        """If alive then mark as dead and return func(*args, **kwargs);
        otherwise return None"""
        info = self._registry.pop(self, None)
        if info and not self._shutdown:
            return info.func(*info.args, **(info.kwargs or {}))

    def detach(self):
        """If alive then mark as dead and return (obj, func, args, kwargs);
        otherwise return None"""
        info = self._registry.get(self)
        obj = info and info.weakref()
        if obj is not None and self._registry.pop(self, None):
            return (obj, info.func, info.args, info.kwargs or {})

    def peek(self):
        """If alive then return (obj, func, args, kwargs);
        otherwise return None"""
        info = self._registry.get(self)
        obj = info and info.weakref()
        if obj is not None:
            return (obj, info.func, info.args, info.kwargs or {})

    @property
    def alive(self):
        """Whether finalizer is alive"""
        return self in self._registry

    @property
    def atexit(self):
        """Whether finalizer should be called at exit"""
        info = self._registry.get(self)
        return bool(info) and info.atexit

    @atexit.setter
    def atexit(self, value):
        info = self._registry.get(self)
        if info:
            info.atexit = bool(value)

    def __repr__(self):
        info = self._registry.get(self)
        obj = info and info.weakref()
        if obj is None:
            return "<%s object at %#x; dead>" % (type(self).__name__, id(self))
        else:
            return "<%s object at %#x; for %r at %#x>" % (
                type(self).__name__,
                id(self),
                type(obj).__name__,
                id(obj),
            )

    @classmethod
    def _select_for_exit(cls):
        # Return live finalizers marked for exit, oldest first
        L = [(f, i) for (f, i) in cls._registry.items() if i.atexit]
        L.sort(key=lambda item: item[1].index)
        return [f for (f, i) in L]

    @classmethod
    def _exitfunc(cls):
        # At shutdown invoke finalizers for which atexit is true.
        # This is called once all other non-daemonic threads have been
        # joined.
        reenable_gc = False
        try:
            if cls._registry:
                import gc

                if gc.isenabled():
                    reenable_gc = True
                    gc.disable()
                pending = None
                while True:
                    if pending is None or weakref_finalize._dirty:
                        pending = cls._select_for_exit()
                        weakref_finalize._dirty = False
                    if not pending:
                        break
                    f = pending.pop()
                    try:
                        # gc is disabled, so (assuming no daemonic
                        # threads) the following is the only line in
                        # this function which might trigger creation
                        # of a new finalizer
                        f()
                    except Exception:
                        sys.excepthook(*sys.exc_info())
                    assert f not in cls._registry
        finally:
            # prevent any more finalizers from executing during shutdown
            weakref_finalize._shutdown = True
            if reenable_gc:
                gc.enable()
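A small usage sketch of the backport above (it mirrors Python 3's
``weakref.finalize``; the explicit ``gc.collect()`` just makes collection
deterministic for the demo):

    import gc

    from urllib3.packages.backports.weakref_finalize import weakref_finalize

    class Resource(object):
        pass

    def cleanup(name):
        print("finalizing %s" % name)

    r = Resource()
    f = weakref_finalize(r, cleanup, "resource-1")
    print(f.alive)  # True while `r` is still reachable
    del r
    gc.collect()    # cleanup("resource-1") runs exactly once
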
@@ -1,4 +1,4 @@
# Copyright (c) 2010-2019 Benjamin Peterson
# Copyright (c) 2010-2020 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@@ -29,7 +29,7 @@ import sys
import types

__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.12.0"
__version__ = "1.16.0"


# Useful for very coarse version differentiation.
@@ -71,6 +71,11 @@ else:
    MAXSIZE = int((1 << 63) - 1)
    del X

if PY34:
    from importlib.util import spec_from_loader
else:
    spec_from_loader = None


def _add_doc(func, doc):
    """Add documentation to a function."""
@@ -182,6 +187,11 @@ class _SixMetaPathImporter(object):
            return self
        return None

    def find_spec(self, fullname, path, target=None):
        if fullname in self.known_modules:
            return spec_from_loader(fullname, self)
        return None

    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
@@ -220,6 +230,12 @@ class _SixMetaPathImporter(object):

    get_source = get_code  # same as get_code

    def create_module(self, spec):
        return self.load_module(spec.name)

    def exec_module(self, module):
        pass


_importer = _SixMetaPathImporter(__name__)

@@ -260,9 +276,19 @@ _moved_attributes = [
    ),
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule(
        "collections_abc",
        "collections",
        "collections.abc" if sys.version_info >= (3, 3) else "collections",
    ),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
    MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
    MovedModule(
        "_dummy_thread",
        "dummy_thread",
        "_dummy_thread" if sys.version_info < (3, 9) else "_thread",
    ),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
@@ -307,7 +333,9 @@ _moved_attributes = [
]
# Add windows specific modules.
if sys.platform == "win32":
    _moved_attributes += [MovedModule("winreg", "_winreg")]
    _moved_attributes += [
        MovedModule("winreg", "_winreg"),
    ]

for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
@@ -476,7 +504,7 @@ class Module_six_moves_urllib_robotparser(_LazyModule):


_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser")
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
@@ -678,9 +706,11 @@ if PY3:
    if sys.version_info[1] <= 1:
        _assertRaisesRegex = "assertRaisesRegexp"
        _assertRegex = "assertRegexpMatches"
        _assertNotRegex = "assertNotRegexpMatches"
    else:
        _assertRaisesRegex = "assertRaisesRegex"
        _assertRegex = "assertRegex"
        _assertNotRegex = "assertNotRegex"
else:

    def b(s):
@@ -707,6 +737,7 @@ else:
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
    _assertNotRegex = "assertNotRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")

@@ -723,6 +754,10 @@ def assertRegex(self, *args, **kwargs):
    return getattr(self, _assertRegex)(*args, **kwargs)


def assertNotRegex(self, *args, **kwargs):
    return getattr(self, _assertNotRegex)(*args, **kwargs)


if PY3:
    exec_ = getattr(moves.builtins, "exec")

@@ -737,7 +772,6 @@ if PY3:
            value = None
            tb = None


else:

    def exec_(_code_, _globs_=None, _locs_=None):
@@ -750,7 +784,7 @@ else:
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")
        exec ("""exec _code_ in _globs_, _locs_""")

    exec_(
        """def reraise(tp, value, tb=None):
@@ -762,18 +796,7 @@ else:
    )


if sys.version_info[:2] == (3, 2):
    exec_(
        """def raise_from(value, from_value):
    try:
        if from_value is None:
            raise value
        raise value from from_value
    finally:
        value = None
"""
    )
elif sys.version_info[:2] > (3, 2):
if sys.version_info[:2] > (3,):
    exec_(
        """def raise_from(value, from_value):
    try:
@@ -863,19 +886,41 @@ if sys.version_info[:2] < (3, 3):
_add_doc(reraise, """Reraise an exception.""")

if sys.version_info[0:2] < (3, 4):
    # This does exactly the same what the :func:`py3:functools.update_wrapper`
    # function does on Python versions after 3.2. It sets the ``__wrapped__``
    # attribute on ``wrapper`` object and it doesn't raise an error if any of
    # the attributes mentioned in ``assigned`` and ``updated`` are missing on
    # ``wrapped`` object.
    def _update_wrapper(
        wrapper,
        wrapped,
        assigned=functools.WRAPPER_ASSIGNMENTS,
        updated=functools.WRAPPER_UPDATES,
    ):
        for attr in assigned:
            try:
                value = getattr(wrapped, attr)
            except AttributeError:
                continue
            else:
                setattr(wrapper, attr, value)
        for attr in updated:
            getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
        wrapper.__wrapped__ = wrapped
        return wrapper

    _update_wrapper.__doc__ = functools.update_wrapper.__doc__

    def wraps(
        wrapped,
        assigned=functools.WRAPPER_ASSIGNMENTS,
        updated=functools.WRAPPER_UPDATES,
    ):
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f

        return wrapper
        return functools.partial(
            _update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated
        )

    wraps.__doc__ = functools.wraps.__doc__

else:
    wraps = functools.wraps
@@ -888,7 +933,15 @@ def with_metaclass(meta, *bases):
    # the actual metaclass.
    class metaclass(type):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
            if sys.version_info[:2] >= (3, 7):
                # This version introduced PEP 560 that requires a bit
                # of extra care (we mimic what is done by __build_class__).
                resolved_bases = types.resolve_bases(bases)
                if resolved_bases is not bases:
                    d["__orig_bases__"] = bases
            else:
                resolved_bases = bases
            return meta(name, resolved_bases, d)

        @classmethod
        def __prepare__(cls, name, this_bases):
@@ -928,12 +981,11 @@ def ensure_binary(s, encoding="utf-8", errors="strict"):
      - `str` -> encoded to `bytes`
      - `bytes` -> `bytes`
    """
    if isinstance(s, binary_type):
        return s
    if isinstance(s, text_type):
        return s.encode(encoding, errors)
    elif isinstance(s, binary_type):
        return s
    else:
        raise TypeError("not expecting type '%s'" % type(s))
    raise TypeError("not expecting type '%s'" % type(s))


def ensure_str(s, encoding="utf-8", errors="strict"):
@@ -947,12 +999,15 @@ def ensure_str(s, encoding="utf-8", errors="strict"):
      - `str` -> `str`
      - `bytes` -> decoded to `str`
    """
    if not isinstance(s, (text_type, binary_type)):
        raise TypeError("not expecting type '%s'" % type(s))
    # Optimization: Fast return for the common case.
    if type(s) is str:
        return s
    if PY2 and isinstance(s, text_type):
        s = s.encode(encoding, errors)
        return s.encode(encoding, errors)
    elif PY3 and isinstance(s, binary_type):
        s = s.decode(encoding, errors)
        return s.decode(encoding, errors)
    elif not isinstance(s, (text_type, binary_type)):
        raise TypeError("not expecting type '%s'" % type(s))
    return s

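A behavior sketch for the reordered helpers above (Python 3 semantics, using
the vendored six):

    from urllib3.packages import six

    assert six.ensure_binary(u"abc") == b"abc"  # str -> bytes
    assert six.ensure_binary(b"abc") == b"abc"  # bytes now returns early
    assert six.ensure_str(b"abc") == "abc"      # bytes -> str on Python 3
    assert six.ensure_str("abc") == "abc"       # fast path: already str
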
@@ -977,7 +1032,7 @@ def ensure_text(s, encoding="utf-8", errors="strict"):

def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    A class decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method

@@ -1,19 +0,0 @@
import sys

try:
    # Our match_hostname function is the same as 3.5's, so we only want to
    # import the match_hostname function if it's at least that good.
    if sys.version_info < (3, 5):
        raise ImportError("Fallback to vendored code")

    from ssl import CertificateError, match_hostname
except ImportError:
    try:
        # Backport of the function from a pypi module
        from backports.ssl_match_hostname import CertificateError, match_hostname
    except ImportError:
        # Our vendored copy
        from ._implementation import CertificateError, match_hostname

# Not needed, but documenting what we provide.
__all__ = ("CertificateError", "match_hostname")

@@ -1,18 +1,24 @@
from __future__ import absolute_import

import collections
import functools
import logging

from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
from ._collections import HTTPHeaderDict, RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
from .exceptions import (
    LocationValueError,
    MaxRetryError,
    ProxySchemeUnknown,
    ProxySchemeUnsupported,
    URLSchemeUnknown,
)
from .packages import six
from .packages.six.moves.urllib.parse import urljoin
from .request import RequestMethods
from .util.url import parse_url
from .util.proxy import connection_requires_http_tunnel
from .util.retry import Retry

from .util.url import parse_url

__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]

@@ -28,6 +34,7 @@ SSL_KEYWORDS = (
    "ca_cert_dir",
    "ssl_context",
    "key_password",
    "server_hostname",
)

# All known keyword arguments that could be provided to the pool manager, its
@@ -53,6 +60,7 @@ _key_fields = (
    "key_headers",  # dict
    "key__proxy",  # parsed proxy url
    "key__proxy_headers",  # dict
    "key__proxy_config",  # class
    "key_socket_options",  # list of (level (int), optname (int), value (int or str)) tuples
    "key__socks_options",  # dict
    "key_assert_hostname",  # bool or string
@@ -64,6 +72,9 @@ _key_fields = (
#: All custom key schemes should include the fields in this key at a minimum.
PoolKey = collections.namedtuple("PoolKey", _key_fields)

_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)


def _default_key_normalizer(key_class, request_context):
    """
@@ -155,11 +166,12 @@ class PoolManager(RequestMethods):
    """

    proxy = None
    proxy_config = None

    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
        RequestMethods.__init__(self, headers)
        self.connection_pool_kw = connection_pool_kw
        self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
        self.pools = RecentlyUsedContainer(num_pools)

        # Locally set the pool classes and keys so other PoolManagers can
        # override them.
@@ -176,7 +188,7 @@ class PoolManager(RequestMethods):

    def _new_pool(self, scheme, host, port, request_context=None):
        """
        Create a new :class:`ConnectionPool` based on host, port, scheme, and
        Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
        any additional pool keyword arguments.

        If ``request_context`` is provided, it is provided as keyword arguments
@@ -212,7 +224,7 @@ class PoolManager(RequestMethods):

    def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
        """
        Get a :class:`ConnectionPool` based on the host, port, and scheme.
        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.

        If ``port`` isn't given, it will be derived from the ``scheme`` using
        ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
@@ -235,20 +247,22 @@ class PoolManager(RequestMethods):

    def connection_from_context(self, request_context):
        """
        Get a :class:`ConnectionPool` based on the request context.
        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.

        ``request_context`` must at least contain the ``scheme`` key and its
        value must be a key in ``key_fn_by_scheme`` instance variable.
        """
        scheme = request_context["scheme"].lower()
        pool_key_constructor = self.key_fn_by_scheme[scheme]
        pool_key_constructor = self.key_fn_by_scheme.get(scheme)
        if not pool_key_constructor:
            raise URLSchemeUnknown(scheme)
        pool_key = pool_key_constructor(request_context)

        return self.connection_from_pool_key(pool_key, request_context=request_context)

    def connection_from_pool_key(self, pool_key, request_context=None):
        """
        Get a :class:`ConnectionPool` based on the provided pool key.
        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.

        ``pool_key`` should be a namedtuple that only contains immutable
        objects. At a minimum it must have the ``scheme``, ``host``, and
@@ -306,9 +320,39 @@ class PoolManager(RequestMethods):
            base_pool_kwargs[key] = value
        return base_pool_kwargs

    def _proxy_requires_url_absolute_form(self, parsed_url):
        """
        Indicates if the proxy requires the complete destination URL in the
        request. Normally this is only needed when not using an HTTP CONNECT
        tunnel.
        """
        if self.proxy is None:
            return False

        return not connection_requires_http_tunnel(
            self.proxy, self.proxy_config, parsed_url.scheme
        )

    def _validate_proxy_scheme_url_selection(self, url_scheme):
        """
        Validates that we're not attempting to do TLS in TLS connections on
        Python2 or with unsupported SSL implementations.
        """
        if self.proxy is None or url_scheme != "https":
            return

        if self.proxy.scheme != "https":
            return

        if six.PY2 and not self.proxy_config.use_forwarding_for_https:
            raise ProxySchemeUnsupported(
                "Contacting HTTPS destinations through HTTPS proxies "
                "'via CONNECT tunnels' is not supported in Python 2"
            )

    def urlopen(self, method, url, redirect=True, **kw):
        """
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

@@ -316,6 +360,8 @@ class PoolManager(RequestMethods):
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        u = parse_url(url)
        self._validate_proxy_scheme_url_selection(u.scheme)

        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

        kw["assert_same_host"] = False
@@ -324,7 +370,7 @@ class PoolManager(RequestMethods):
        if "headers" not in kw:
            kw["headers"] = self.headers.copy()

        if self.proxy is not None and u.scheme == "http":
        if self._proxy_requires_url_absolute_form(u):
            response = conn.urlopen(method, url, **kw)
        else:
            response = conn.urlopen(method, u.request_uri, **kw)
@@ -336,9 +382,12 @@ class PoolManager(RequestMethods):
        # Support relative URLs for redirecting.
        redirect_location = urljoin(url, redirect_location)

        # RFC 7231, Section 6.4.4
        if response.status == 303:
            # Change the method according to RFC 9110, Section 15.4.4.
            method = "GET"
            # And lose the body not to transfer anything sensitive.
            kw["body"] = None
            kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change()

        retries = kw.get("retries")
        if not isinstance(retries, Retry):
@@ -359,6 +408,7 @@ class PoolManager(RequestMethods):
            retries = retries.increment(method, url, response=response, _pool=conn)
        except MaxRetryError:
            if retries.raise_on_redirect:
                response.drain_conn()
                raise
            return response

@@ -366,6 +416,8 @@ class PoolManager(RequestMethods):
        kw["redirect"] = redirect

        log.info("Redirecting %s -> %s", url, redirect_location)

        response.drain_conn()
        return self.urlopen(method, redirect_location, **kw)


@@ -383,6 +435,19 @@ class ProxyManager(PoolManager):
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.

    :param proxy_ssl_context:
        The proxy SSL context is used to establish the TLS connection to the
        proxy when using HTTPS proxies.

    :param use_forwarding_for_https:
        (Defaults to False) If set to True will forward requests to the HTTPS
        proxy to be made on behalf of the client instead of creating a TLS
        tunnel via the CONNECT method. **Enabling this flag means that request
        and response headers and content will be visible from the HTTPS proxy**
        whereas tunneling keeps request and response headers and content
        private. IP address, target hostname, SNI, and port are always visible
        to an HTTPS proxy even when this flag is disabled.

    Example:
        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
        >>> r1 = proxy.request('GET', 'http://google.com/')
@@ -402,6 +467,8 @@ class ProxyManager(PoolManager):
        num_pools=10,
        headers=None,
        proxy_headers=None,
        proxy_ssl_context=None,
        use_forwarding_for_https=False,
        **connection_pool_kw
    ):

@@ -412,18 +479,22 @@ class ProxyManager(PoolManager):
                proxy_url.port,
            )
        proxy = parse_url(proxy_url)
        if not proxy.port:
            port = port_by_scheme.get(proxy.scheme, 80)
            proxy = proxy._replace(port=port)

        if proxy.scheme not in ("http", "https"):
            raise ProxySchemeUnknown(proxy.scheme)

        if not proxy.port:
            port = port_by_scheme.get(proxy.scheme, 80)
            proxy = proxy._replace(port=port)

        self.proxy = proxy
        self.proxy_headers = proxy_headers or {}
        self.proxy_ssl_context = proxy_ssl_context
        self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)

        connection_pool_kw["_proxy"] = self.proxy
        connection_pool_kw["_proxy_headers"] = self.proxy_headers
        connection_pool_kw["_proxy_config"] = self.proxy_config

        super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)

@@ -455,11 +526,10 @@ class ProxyManager(PoolManager):
    def urlopen(self, method, url, redirect=True, **kw):
        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
        u = parse_url(url)

        if u.scheme == "http":
            # For proxied HTTPS requests, httplib sets the necessary headers
            # on the CONNECT to the proxy. For HTTP, we'll definitely
            # need to set 'Host' at the very least.
        if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
            # For connections using HTTP CONNECT, httplib sets the necessary
            # headers on the CONNECT to the proxy. If we're not using CONNECT,
            # we'll definitely need to set 'Host' at the very least.
            headers = kw.get("headers", self.headers)
            kw["headers"] = self._set_proxy_headers(url, headers)

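A sketch of the reordered validation above: the scheme check now runs before
the port lookup, so an unsupported scheme fails fast (the proxy URL is
hypothetical):

    import urllib3
    from urllib3.exceptions import ProxySchemeUnknown

    try:
        urllib3.ProxyManager("socks5://localhost:1080/")
    except ProxySchemeUnknown as e:
        print(e)  # "Proxy URL had unsupported scheme socks5, ..."
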
@@ -1,8 +1,10 @@
from __future__ import absolute_import

from .filepost import encode_multipart_formdata
from .packages.six.moves.urllib.parse import urlencode
import sys

from .filepost import encode_multipart_formdata
from .packages import six
from .packages.six.moves.urllib.parse import urlencode

__all__ = ["RequestMethods"]

@@ -10,8 +12,8 @@ __all__ = ["RequestMethods"]
class RequestMethods(object):
    """
    Convenience mixin for classes who implement a :meth:`urlopen` method, such
    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
    :class:`~urllib3.poolmanager.PoolManager`.
    as :class:`urllib3.HTTPConnectionPool` and
    :class:`urllib3.PoolManager`.

    Provides behavior for making common types of HTTP request methods and
    decides which type of request field encoding to use.
@@ -111,9 +113,9 @@ class RequestMethods(object):
        the body. This is useful for request methods like POST, PUT, PATCH, etc.

        When ``encode_multipart=True`` (default), then
        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
        :func:`urllib3.encode_multipart_formdata` is used to encode
        the payload with the appropriate content type. Otherwise
        :meth:`urllib.urlencode` is used with the
        :func:`urllib.parse.urlencode` is used with the
        'application/x-www-form-urlencoded' content type.

        Multipart encoding must be used when posting files, and it's reasonably
@@ -169,3 +171,21 @@ class RequestMethods(object):
        extra_kw.update(urlopen_kw)

        return self.urlopen(method, url, **extra_kw)


if not six.PY2:

    class RequestModule(sys.modules[__name__].__class__):
        def __call__(self, *args, **kwargs):
            """
            If user tries to call this module directly urllib3 v2.x style raise an error to the user
            suggesting they may need urllib3 v2
            """
            raise TypeError(
                "'module' object is not callable\n"
                "urllib3.request() method is not supported in this release, "
                "upgrade to urllib3 v2 to use it\n"
                "see https://urllib3.readthedocs.io/en/stable/v2-migration-guide.html"
            )

    sys.modules[__name__].__class__ = RequestModule

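A sketch of the guard above: calling the module object itself (the urllib3 v2
``urllib3.request(...)`` shorthand) fails loudly on this 1.26 release (the
target URL is hypothetical):

    import urllib3

    try:
        urllib3.request("GET", "https://example.com")
    except TypeError as e:
        print(e)  # points users at the v2 migration guide
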
@@ -1,29 +1,38 @@
from __future__ import absolute_import
from contextlib import contextmanager
import zlib

import io
import logging
from socket import timeout as SocketTimeout
import sys
import warnings
import zlib
from contextlib import contextmanager
from socket import error as SocketError
from socket import timeout as SocketTimeout

try:
    import brotli
    try:
        import brotlicffi as brotli
    except ImportError:
        import brotli
except ImportError:
    brotli = None

from . import util
from ._collections import HTTPHeaderDict
from .connection import BaseSSLError, HTTPException
from .exceptions import (
    BodyNotHttplibCompatible,
    ProtocolError,
    DecodeError,
    HTTPError,
    IncompleteRead,
    InvalidChunkLength,
    InvalidHeader,
    ProtocolError,
    ReadTimeoutError,
    ResponseNotChunked,
    IncompleteRead,
    InvalidHeader,
    SSLError,
)
from .packages.six import string_types as basestring, PY3
from .packages.six.moves import http_client as httplib
from .connection import HTTPException, BaseSSLError
from .packages import six
from .util.response import is_fp_closed, is_response_to_head

log = logging.getLogger(__name__)
@@ -106,11 +115,10 @@ if brotli is not None:
        # are for 'brotlipy' and bottom branches for 'Brotli'
        def __init__(self):
            self._obj = brotli.Decompressor()

        def decompress(self, data):
            if hasattr(self._obj, "decompress"):
                return self._obj.decompress(data)
            return self._obj.process(data)
                self.decompress = self._obj.decompress
            else:
                self.decompress = self._obj.process

        def flush(self):
            if hasattr(self._obj, "flush"):
@@ -156,13 +164,13 @@ class HTTPResponse(io.IOBase):
    """
    HTTP Response container.

    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
    Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
    loaded and decoded on-demand when the ``data`` property is accessed. This
    class is also compatible with the Python standard library's :mod:`io`
    module, and can hence be treated as a readable object in the context of that
    framework.

    Extra parameters for behaviour not present in httplib.HTTPResponse:
    Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:

    :param preload_content:
        If True, the response's body will be preloaded during construction.
@@ -172,7 +180,7 @@ class HTTPResponse(io.IOBase):
        'content-encoding' header.

    :param original_response:
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.

@@ -232,7 +240,7 @@ class HTTPResponse(io.IOBase):
        self.msg = msg
        self._request_url = request_url

        if body and isinstance(body, (basestring, bytes)):
        if body and isinstance(body, (six.string_types, bytes)):
            self._body = body

        self._pool = pool
@@ -277,9 +285,20 @@ class HTTPResponse(io.IOBase):
            self._pool._put_conn(self._connection)
        self._connection = None

    def drain_conn(self):
        """
        Read and discard any remaining HTTP response data in the response connection.

        Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
        """
        try:
            self.read()
        except (HTTPError, SocketError, BaseSSLError, HTTPException):
            pass

    @property
    def data(self):
        # For backwords-compat with earlier urllib3 0.4 and earlier.
        # For backwards-compat with earlier urllib3 0.4 and earlier.
        if self._body:
            return self._body

@@ -296,8 +315,8 @@ class HTTPResponse(io.IOBase):
    def tell(self):
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
        are encoded on the wire (e.g, compressed).
        the amount of content returned by :meth:``urllib3.response.HTTPResponse.read``
        if bytes are encoded on the wire (e.g, compressed).
        """
        return self._fp_bytes_read

@@ -431,10 +450,9 @@ class HTTPResponse(io.IOBase):

        except BaseSSLError as e:
            # FIXME: Is there a better way to differentiate between SSLErrors?
            if "read operation timed out" not in str(e):  # Defensive:
                # This shouldn't happen but just in case we're missing an edge
                # case, let's avoid swallowing SSL errors.
                raise
            if "read operation timed out" not in str(e):
                # SSL errors related to framing/MAC get wrapped and reraised here
                raise SSLError(e)

            raise ReadTimeoutError(self._pool, None, "Read timed out.")

@@ -466,9 +484,57 @@ class HTTPResponse(io.IOBase):
        if self._original_response and self._original_response.isclosed():
            self.release_conn()

    def _fp_read(self, amt):
        """
        Read a response with the thought that reading the number of bytes
        larger than can fit in a 32-bit int at a time via SSL in some
        known cases leads to an overflow error that has to be prevented
        if `amt` or `self.length_remaining` indicate that a problem may
        happen.

        The known cases:
          * 3.8 <= CPython < 3.9.7 because of a bug
            https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
          * urllib3 injected with pyOpenSSL-backed SSL-support.
          * CPython < 3.10 only when `amt` does not fit 32-bit int.
        """
        assert self._fp
        c_int_max = 2 ** 31 - 1
        if (
            (
                (amt and amt > c_int_max)
                or (self.length_remaining and self.length_remaining > c_int_max)
            )
            and not util.IS_SECURETRANSPORT
            and (util.IS_PYOPENSSL or sys.version_info < (3, 10))
        ):
            buffer = io.BytesIO()
            # Besides `max_chunk_amt` being a maximum chunk size, it
            # affects memory overhead of reading a response by this
            # method in CPython.
            # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
            # chunk size that does not lead to an overflow error, but
            # 256 MiB is a compromise.
            max_chunk_amt = 2 ** 28
            while amt is None or amt != 0:
                if amt is not None:
                    chunk_amt = min(amt, max_chunk_amt)
                    amt -= chunk_amt
                else:
                    chunk_amt = max_chunk_amt
                data = self._fp.read(chunk_amt)
                if not data:
                    break
                buffer.write(data)
                del data  # to reduce peak memory usage by `max_chunk_amt`.
            return buffer.getvalue()
        else:
            # StringIO doesn't like amt=None
            return self._fp.read(amt) if amt is not None else self._fp.read()

    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
@@ -498,13 +564,11 @@ class HTTPResponse(io.IOBase):
        fp_closed = getattr(self._fp, "closed", False)

        with self._error_catcher():
            data = self._fp_read(amt) if not fp_closed else b""
            if amt is None:
                # cStringIO doesn't like amt=None
                data = self._fp.read() if not fp_closed else b""
                flush_decoder = True
            else:
                cache_content = False
                data = self._fp.read(amt) if not fp_closed else b""
                if (
                    amt != 0 and not data
                ):  # Platform-specific: Buggy versions of Python.
@@ -569,7 +633,7 @@ class HTTPResponse(io.IOBase):
    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        Given an :class:`http.client.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
@@ -578,11 +642,11 @@ class HTTPResponse(io.IOBase):
        headers = r.msg

        if not isinstance(headers, HTTPHeaderDict):
            if PY3:
                headers = HTTPHeaderDict(headers.items())
            else:
            if six.PY2:
                # Python 2.7
                headers = HTTPHeaderDict.from_httplib(headers)
            else:
                headers = HTTPHeaderDict(headers.items())

        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, "strict", 0)
@@ -598,11 +662,23 @@ class HTTPResponse(io.IOBase):
        )
        return resp

    # Backwards-compatibility methods for httplib.HTTPResponse
    # Backwards-compatibility methods for http.client.HTTPResponse
    def getheaders(self):
        warnings.warn(
            "HTTPResponse.getheaders() is deprecated and will be removed "
            "in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.",
            category=DeprecationWarning,
            stacklevel=2,
        )
        return self.headers

    def getheader(self, name, default=None):
        warnings.warn(
            "HTTPResponse.getheader() is deprecated and will be removed "
            "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).",
            category=DeprecationWarning,
            stacklevel=2,
        )
        return self.headers.get(name, default)

    # Backwards compatibility for http.cookiejar
@@ -668,8 +744,8 @@ class HTTPResponse(io.IOBase):
    def supports_chunked_reads(self):
        """
        Checks if the underlying file-like object looks like a
        httplib.HTTPResponse object. We do this by testing for the fp
        attribute. If it is present we assume it returns raw chunks as
        :class:`http.client.HTTPResponse` object. We do this by testing for
        the fp attribute. If it is present we assume it returns raw chunks as
        processed by read_chunked().
        """
        return hasattr(self._fp, "fp")
@@ -686,7 +762,7 @@ class HTTPResponse(io.IOBase):
        except ValueError:
            # Invalid chunked protocol response, abort.
            self.close()
            raise httplib.IncompleteRead(line)
            raise InvalidChunkLength(self, line)

    def _handle_chunk(self, amt):
        returned_chunk = None
@@ -733,7 +809,7 @@ class HTTPResponse(io.IOBase):
            )
        if not self.supports_chunked_reads():
            raise BodyNotHttplibCompatible(
                "Body should be httplib.HTTPResponse like. "
                "Body should be http.client.HTTPResponse like. "
"It should have have an fp attribute which returns raw chunks."
|
||||
)
|
||||
|
||||
@@ -792,7 +868,7 @@ class HTTPResponse(io.IOBase):
|
||||
return self._request_url
|
||||
|
||||
def __iter__(self):
|
||||
buffer = [b""]
|
||||
buffer = []
|
||||
for chunk in self.stream(decode_content=True):
|
||||
if b"\n" in chunk:
|
||||
chunk = chunk.split(b"\n")
|
||||
|
||||
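A sketch of iterating an ``HTTPResponse`` line by line via the ``__iter__``
above (``decode_content=True`` means compressed bodies are decompressed; the
URL is hypothetical):

    import urllib3

    http = urllib3.PoolManager()
    resp = http.request("GET", "https://example.com/", preload_content=False)
    for line in resp:
        print(line)  # bytes, split on b"\n" by __iter__
    resp.release_conn()
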
@@ -2,23 +2,23 @@ from __future__ import absolute_import

# For backwards compatibility, provide imports that used to be here.
from .connection import is_connection_dropped
from .request import make_headers
from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
from .response import is_fp_closed
from .retry import Retry
from .ssl_ import (
    SSLContext,
    ALPN_PROTOCOLS,
    HAS_SNI,
    IS_PYOPENSSL,
    IS_SECURETRANSPORT,
    PROTOCOL_TLS,
    SSLContext,
    assert_fingerprint,
    resolve_cert_reqs,
    resolve_ssl_version,
    ssl_wrap_socket,
    PROTOCOL_TLS,
)
from .timeout import current_time, Timeout

from .retry import Retry
from .url import get_host, parse_url, split_first, Url
from .timeout import Timeout, current_time
from .url import Url, get_host, parse_url, split_first
from .wait import wait_for_read, wait_for_write

__all__ = (
@@ -27,6 +27,7 @@ __all__ = (
    "IS_SECURETRANSPORT",
    "SSLContext",
    "PROTOCOL_TLS",
    "ALPN_PROTOCOLS",
    "Retry",
    "Timeout",
    "Url",
@@ -43,4 +44,6 @@ __all__ = (
    "ssl_wrap_socket",
    "wait_for_read",
    "wait_for_write",
    "SKIP_HEADER",
    "SKIPPABLE_HEADERS",
)

@@ -1,7 +1,11 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import socket
|
||||
from .wait import NoWayToWaitForSocketError, wait_for_read
|
||||
|
||||
from ..contrib import _appengine_environ
|
||||
from ..exceptions import LocationParseError
|
||||
from ..packages import six
|
||||
from .wait import NoWayToWaitForSocketError, wait_for_read
|
||||
|
||||
|
||||
def is_connection_dropped(conn): # Platform-specific
|
||||
@@ -9,7 +13,7 @@ def is_connection_dropped(conn): # Platform-specific
|
||||
Returns True if the connection is dropped and should be closed.
|
||||
|
||||
:param conn:
|
||||
:class:`httplib.HTTPConnection` object.
|
||||
:class:`http.client.HTTPConnection` object.
|
||||
|
||||
Note: For platforms like AppEngine, this will always return ``False`` to
|
||||
let the platform handle connection recycling transparently for us.
|
||||
@@ -42,7 +46,7 @@ def create_connection(
|
||||
port)``) and return the socket object. Passing the optional
|
||||
*timeout* parameter will set the timeout on the socket instance
|
||||
before attempting to connect. If no *timeout* is supplied, the
|
||||
global default timeout setting returned by :func:`getdefaulttimeout`
|
||||
global default timeout setting returned by :func:`socket.getdefaulttimeout`
|
||||
is used. If *source_address* is set it must be a tuple of (host, port)
|
||||
for the socket to bind as a source address before making the connection.
|
||||
An host of '' or port 0 tells the OS to use the default.
|
||||
@@ -58,6 +62,13 @@ def create_connection(
|
||||
# The original create_connection function always returns all records.
|
||||
family = allowed_gai_family()
|
||||
|
||||
try:
|
||||
host.encode("idna")
|
||||
except UnicodeError:
|
||||
return six.raise_from(
|
||||
LocationParseError(u"'%s', label empty or too long" % host), None
|
||||
)
|
||||
|
||||
for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
|
||||
af, socktype, proto, canonname, sa = res
|
||||
sock = None
|
||||
@@ -106,7 +117,7 @@ def allowed_gai_family():
|
||||
|
||||
|
||||
def _has_ipv6(host):
|
||||
""" Returns True if the system can bind an IPv6 address. """
|
||||
"""Returns True if the system can bind an IPv6 address."""
|
||||
sock = None
|
||||
has_ipv6 = False
|
||||
|
||||
@@ -121,7 +132,7 @@ def _has_ipv6(host):
|
||||
# has_ipv6 returns true if cPython was compiled with IPv6 support.
|
||||
# It does not tell us if the system has IPv6 support enabled. To
|
||||
# determine that we must bind to an IPv6 address.
|
||||
# https://github.com/shazow/urllib3/pull/611
|
||||
# https://github.com/urllib3/urllib3/pull/611
|
||||
# https://bugs.python.org/issue658327
|
||||
try:
|
||||
sock = socket.socket(socket.AF_INET6)
|
||||
|
||||
lib/urllib3/util/proxy.py (new file, 57 lines)
@@ -0,0 +1,57 @@
from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version


def connection_requires_http_tunnel(
    proxy_url=None, proxy_config=None, destination_scheme=None
):
    """
    Returns True if the connection requires an HTTP CONNECT through the proxy.

    :param URL proxy_url:
        URL of the proxy.
    :param ProxyConfig proxy_config:
        Proxy configuration from poolmanager.py
    :param str destination_scheme:
        The scheme of the destination. (e.g. https, http, etc.)
    """
    # If we're not using a proxy, no way to use a tunnel.
    if proxy_url is None:
        return False

    # HTTP destinations never require tunneling, we always forward.
    if destination_scheme == "http":
        return False

    # Support for forwarding with HTTPS proxies and HTTPS destinations.
    if (
        proxy_url.scheme == "https"
        and proxy_config
        and proxy_config.use_forwarding_for_https
    ):
        return False

    # Otherwise always use a tunnel.
    return True
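How the decision falls out in practice, using urllib3's own parse_url (the proxy host below is hypothetical):

    from urllib3.util import parse_url
    from urllib3.util.proxy import connection_requires_http_tunnel

    proxy = parse_url("http://proxy.internal:3128")  # hypothetical proxy

    # Plain-HTTP destinations are forwarded, never tunneled:
    connection_requires_http_tunnel(proxy, None, "http")   # False

    # HTTPS destinations get a CONNECT tunnel unless an HTTPS proxy is
    # explicitly configured with use_forwarding_for_https:
    connection_requires_http_tunnel(proxy, None, "https")  # True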

def create_proxy_ssl_context(
    ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
):
    """
    Generates a default proxy ssl context if one hasn't been provided by the
    user.
    """
    ssl_context = create_urllib3_context(
        ssl_version=resolve_ssl_version(ssl_version),
        cert_reqs=resolve_cert_reqs(cert_reqs),
    )

    if (
        not ca_certs
        and not ca_cert_dir
        and not ca_cert_data
        and hasattr(ssl_context, "load_default_certs")
    ):
        ssl_context.load_default_certs()

    return ssl_context
@@ -1,4 +1,5 @@
import collections

from ..packages import six
from ..packages.six.moves import queue


@@ -1,12 +1,23 @@
from __future__ import absolute_import

from base64 import b64encode

from ..packages.six import b, integer_types
from ..exceptions import UnrewindableBodyError
from ..packages.six import b, integer_types

# Pass as a value within ``headers`` to skip
# emitting some HTTP headers that are added automatically.
# The only headers that are supported are ``Accept-Encoding``,
# ``Host``, and ``User-Agent``.
SKIP_HEADER = "@@@SKIP_HEADER@@@"
SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
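The sentinel is compared against header values in the connection layer; a short usage sketch against a reachable echo host:

    import urllib3
    from urllib3.util import SKIP_HEADER

    http = urllib3.PoolManager()
    # Suppress the automatic User-Agent header entirely. Only the three
    # SKIPPABLE_HEADERS (Accept-Encoding, Host, User-Agent) honor this.
    resp = http.request(
        "GET",
        "https://httpbin.org/headers",
        headers={"User-Agent": SKIP_HEADER},
    )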

ACCEPT_ENCODING = "gzip,deflate"
try:
    import brotli as _unused_module_brotli  # noqa: F401
    try:
        import brotlicffi as _unused_module_brotli  # noqa: F401
    except ImportError:
        import brotli as _unused_module_brotli  # noqa: F401
except ImportError:
    pass
else:
@@ -122,7 +133,7 @@ def rewind_body(body, body_pos):
            body_seek(body_pos)
        except (IOError, OSError):
            raise UnrewindableBodyError(
                "An error occurred when rewinding request " "body for redirect/retry."
                "An error occurred when rewinding request body for redirect/retry."
            )
    elif body_pos is _FAILEDTELL:
        raise UnrewindableBodyError(
@@ -131,5 +142,5 @@ def rewind_body(body, body_pos):
        )
    else:
        raise ValueError(
            "body_pos must be of type integer, " "instead it was %s." % type(body_pos)
            "body_pos must be of type integer, instead it was %s." % type(body_pos)
        )
@@ -1,7 +1,9 @@
from __future__ import absolute_import
from ..packages.six.moves import http_client as httplib

from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect

from ..exceptions import HeaderParsingError
from ..packages.six.moves import http_client as httplib


def is_fp_closed(obj):
@@ -42,8 +44,7 @@ def assert_header_parsing(headers):

    Only works on Python 3.

    :param headers: Headers to verify.
    :type headers: `httplib.HTTPMessage`.
    :param http.client.HTTPMessage headers: Headers to verify.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
@@ -66,6 +67,25 @@ def assert_header_parsing(headers):

    if isinstance(payload, (bytes, str)):
        unparsed_data = payload
    if defects:
        # httplib is assuming a response body is available
        # when parsing headers even when httplib only sends
        # header data to parse_headers(). This results in
        # defects on multipart responses in particular.
        # See: https://github.com/urllib3/urllib3/issues/800

        # So we ignore the following defects:
        # - StartBoundaryNotFoundDefect:
        #     The claimed start boundary was never found.
        # - MultipartInvariantViolationDefect:
        #     A message claimed to be a multipart but no subparts were found.
        defects = [
            defect
            for defect in defects
            if not isinstance(
                defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
            )
        ]

    if defects or unparsed_data:
        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
@@ -76,8 +96,9 @@ def is_response_to_head(response):
    Checks whether the request of a response has been a HEAD-request.
    Handles the quirks of AppEngine.

    :param conn:
    :type conn: :class:`httplib.HTTPResponse`
    :param http.client.HTTPResponse response:
        Response to check if the originating request
        used 'HEAD' as a method.
    """
    # FIXME: Can we do this somehow without accessing private httplib _method?
    method = response._method
@@ -1,22 +1,24 @@
from __future__ import absolute_import
import time

import email
import logging
import re
import time
import warnings
from collections import namedtuple
from itertools import takewhile
import email
import re

from ..exceptions import (
    ConnectTimeoutError,
    InvalidHeader,
    MaxRetryError,
    ProtocolError,
    ProxyError,
    ReadTimeoutError,
    ResponseError,
    InvalidHeader,
)
from ..packages import six


log = logging.getLogger(__name__)


@@ -26,8 +28,69 @@ RequestHistory = namedtuple(
)


# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
_Default = object()


class _RetryMeta(type):
    @property
    def DEFAULT_METHOD_WHITELIST(cls):
        warnings.warn(
            "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
            "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
            DeprecationWarning,
        )
        return cls.DEFAULT_ALLOWED_METHODS

    @DEFAULT_METHOD_WHITELIST.setter
    def DEFAULT_METHOD_WHITELIST(cls, value):
        warnings.warn(
            "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
            "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
            DeprecationWarning,
        )
        cls.DEFAULT_ALLOWED_METHODS = value

    @property
    def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
        warnings.warn(
            "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
            "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
            DeprecationWarning,
        )
        return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT

    @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
    def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
        warnings.warn(
            "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
            "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
            DeprecationWarning,
        )
        cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value

    @property
    def BACKOFF_MAX(cls):
        warnings.warn(
            "Using 'Retry.BACKOFF_MAX' is deprecated and "
            "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
            DeprecationWarning,
        )
        return cls.DEFAULT_BACKOFF_MAX

    @BACKOFF_MAX.setter
    def BACKOFF_MAX(cls, value):
        warnings.warn(
            "Using 'Retry.BACKOFF_MAX' is deprecated and "
            "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
            DeprecationWarning,
        )
        cls.DEFAULT_BACKOFF_MAX = value
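The metaclass keeps the old class-level names readable and writable while warning; a small sketch of what callers observe under urllib3 1.26:

    import warnings
    from urllib3.util.retry import Retry

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        cap = Retry.BACKOFF_MAX  # deprecated class-level alias
    assert cap == Retry.DEFAULT_BACKOFF_MAX == 120
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)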
@six.add_metaclass(_RetryMeta)
class Retry(object):
    """ Retry configuration.
    """Retry configuration.

    Each retry attempt will create a new Retry object with updated values, so
    they can be safely reused.
@@ -53,8 +116,7 @@ class Retry(object):
        Total number of retries to allow. Takes precedence over other counts.

        Set to ``None`` to remove this constraint and fall back on other
        counts. It's a good idea to set this to some sensibly-high value to
        account for unexpected edge cases and avoid infinite retry loops.
        counts.

        Set to ``0`` to fail on the first retry.

@@ -95,18 +157,35 @@ class Retry(object):

        Set to ``0`` to fail on the first retry of this type.

    :param iterable method_whitelist:
    :param int other:
        How many times to retry on other errors.

        Other errors are errors that are not connect, read, redirect or status errors.
        These errors might be raised after the request was sent to the server, so the
        request might have side-effects.

        Set to ``0`` to fail on the first retry of this type.

        If ``total`` is not set, it's a good idea to set this to 0 to account
        for unexpected edge cases and avoid infinite retry loops.

    :param iterable allowed_methods:
        Set of uppercased HTTP method verbs that we should retry on.

        By default, we only retry on methods which are considered to be
        idempotent (multiple requests with the same parameters end with the
        same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
        same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.

        Set to a ``False`` value to retry on any verb.

        .. warning::

            Previously this parameter was named ``method_whitelist``; that
            usage is deprecated in v1.26.0 and will be removed in v2.0.

    :param iterable status_forcelist:
        A set of integer HTTP status codes that we should force a retry on.
        A retry is initiated if the request method is in ``method_whitelist``
        A retry is initiated if the request method is in ``allowed_methods``
        and the response status code is in ``status_forcelist``.

        By default, this is disabled with ``None``.
@@ -120,7 +199,7 @@ class Retry(object):

        seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
        for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
        than :attr:`Retry.BACKOFF_MAX`.
        than :attr:`Retry.DEFAULT_BACKOFF_MAX`.

        By default, backoff is disabled (set to 0).

@@ -147,16 +226,19 @@ class Retry(object):
        request.
    """

    DEFAULT_METHOD_WHITELIST = frozenset(
    #: Default methods to be used for ``allowed_methods``
    DEFAULT_ALLOWED_METHODS = frozenset(
        ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
    )

    #: Default status codes to be used for ``status_forcelist``
    RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])

    DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(["Authorization"])
    #: Default headers to be used for ``remove_headers_on_redirect``
    DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"])

    #: Maximum backoff time.
    BACKOFF_MAX = 120
    DEFAULT_BACKOFF_MAX = 120

    def __init__(
        self,
@@ -165,20 +247,43 @@ class Retry(object):
        read=None,
        redirect=None,
        status=None,
        method_whitelist=DEFAULT_METHOD_WHITELIST,
        other=None,
        allowed_methods=_Default,
        status_forcelist=None,
        backoff_factor=0,
        raise_on_redirect=True,
        raise_on_status=True,
        history=None,
        respect_retry_after_header=True,
        remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST,
        remove_headers_on_redirect=_Default,
        # TODO: Deprecated, remove in v2.0
        method_whitelist=_Default,
    ):

        if method_whitelist is not _Default:
            if allowed_methods is not _Default:
                raise ValueError(
                    "Using both 'allowed_methods' and "
                    "'method_whitelist' together is not allowed. "
                    "Instead only use 'allowed_methods'"
                )
            warnings.warn(
                "Using 'method_whitelist' with Retry is deprecated and "
                "will be removed in v2.0. Use 'allowed_methods' instead",
                DeprecationWarning,
                stacklevel=2,
            )
            allowed_methods = method_whitelist
        if allowed_methods is _Default:
            allowed_methods = self.DEFAULT_ALLOWED_METHODS
        if remove_headers_on_redirect is _Default:
            remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT

        self.total = total
        self.connect = connect
        self.read = read
        self.status = status
        self.other = other

        if redirect is False or total is False:
            redirect = 0
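Both spellings of the constructor keyword remain accepted in 1.26; a short sketch of the shim above in action:

    import warnings
    from urllib3.util.retry import Retry

    # New spelling (urllib3 >= 1.26):
    retry = Retry(total=5, allowed_methods=frozenset(["GET", "PUT"]))

    # Old spelling still works but warns and maps onto allowed_methods;
    # passing both keywords together raises ValueError.
    with warnings.catch_warnings(record=True):
        warnings.simplefilter("always")
        legacy = Retry(total=5, method_whitelist=frozenset(["GET", "PUT"]))
    assert legacy.allowed_methods == frozenset(["GET", "PUT"])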
@@ -186,7 +291,7 @@ class Retry(object):

        self.redirect = redirect
        self.status_forcelist = status_forcelist or set()
        self.method_whitelist = method_whitelist
        self.allowed_methods = allowed_methods
        self.backoff_factor = backoff_factor
        self.raise_on_redirect = raise_on_redirect
        self.raise_on_status = raise_on_status
@@ -203,7 +308,7 @@ class Retry(object):
            read=self.read,
            redirect=self.redirect,
            status=self.status,
            method_whitelist=self.method_whitelist,
            other=self.other,
            status_forcelist=self.status_forcelist,
            backoff_factor=self.backoff_factor,
            raise_on_redirect=self.raise_on_redirect,
@@ -212,12 +317,29 @@ class Retry(object):
            remove_headers_on_redirect=self.remove_headers_on_redirect,
            respect_retry_after_header=self.respect_retry_after_header,
        )

        # TODO: If already given in **kw we use what's given to us
        # If not given we need to figure out what to pass. We decide
        # based on whether our class has the 'method_whitelist' property
        # and if so we pass the deprecated 'method_whitelist' otherwise
        # we use 'allowed_methods'. Remove in v2.0
        if "method_whitelist" not in kw and "allowed_methods" not in kw:
            if "method_whitelist" in self.__dict__:
                warnings.warn(
                    "Using 'method_whitelist' with Retry is deprecated and "
                    "will be removed in v2.0. Use 'allowed_methods' instead",
                    DeprecationWarning,
                )
                params["method_whitelist"] = self.allowed_methods
            else:
                params["allowed_methods"] = self.allowed_methods

        params.update(kw)
        return type(self)(**params)

    @classmethod
    def from_int(cls, retries, redirect=True, default=None):
        """ Backwards-compatibility for the old retries format."""
        """Backwards-compatibility for the old retries format."""
        if retries is None:
            retries = default if default is not None else cls.DEFAULT

@@ -230,7 +352,7 @@ class Retry(object):
        return new_retries

    def get_backoff_time(self):
        """ Formula for computing the current backoff
        """Formula for computing the current backoff

        :rtype: float
        """
@@ -244,17 +366,24 @@ class Retry(object):
            return 0

        backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
        return min(self.BACKOFF_MAX, backoff_value)
        return min(self.DEFAULT_BACKOFF_MAX, backoff_value)

    def parse_retry_after(self, retry_after):
        # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
        if re.match(r"^\s*[0-9]+\s*$", retry_after):
            seconds = int(retry_after)
        else:
            retry_date_tuple = email.utils.parsedate(retry_after)
            retry_date_tuple = email.utils.parsedate_tz(retry_after)
            if retry_date_tuple is None:
                raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
            retry_date = time.mktime(retry_date_tuple)
            if retry_date_tuple[9] is None:  # Python 2
                # Assume UTC if no timezone was specified
                # On Python2.7, parsedate_tz returns None for a timezone offset
                # instead of 0 if no timezone is given, where mktime_tz treats
                # a None timezone offset as local time.
                retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]

            retry_date = email.utils.mktime_tz(retry_date_tuple)
            seconds = retry_date - time.time()

        if seconds < 0:
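Two behaviors worth pinning down from the hunk above: the backoff cap now lives in DEFAULT_BACKOFF_MAX, and Retry-After parsing switched to parsedate_tz so date values default to UTC. A quick sketch:

    from urllib3.util.retry import Retry

    retry = Retry(backoff_factor=0.5)
    # get_backoff_time() computes factor * 2**(n - 1) for n consecutive
    # errors, capped at Retry.DEFAULT_BACKOFF_MAX (120s); the first retry
    # sleeps 0 because fewer than two errors have been recorded yet.
    [0.5 * (2 ** (n - 1)) for n in range(2, 7)]  # [1.0, 2.0, 4.0, 8.0, 16.0]

    # parse_retry_after() accepts integer seconds or an HTTP-date
    # (negative deltas are clamped to 0):
    retry.parse_retry_after("120")  # 120
    retry.parse_retry_after("Fri, 31 Dec 1999 23:59:59 GMT")  # 0, in the past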
@@ -263,9 +392,9 @@ class Retry(object):
        return seconds

    def get_retry_after(self, response):
        """ Get the value of Retry-After in seconds. """
        """Get the value of Retry-After in seconds."""

        retry_after = response.getheader("Retry-After")
        retry_after = response.headers.get("Retry-After")

        if retry_after is None:
            return None
@@ -287,7 +416,7 @@ class Retry(object):
        time.sleep(backoff)

    def sleep(self, response=None):
        """ Sleep between retry attempts.
        """Sleep between retry attempts.

        This method will respect a server's ``Retry-After`` response header
        and sleep the duration of the time requested. If that is not present, it
@@ -303,28 +432,41 @@ class Retry(object):
            self._sleep_backoff()

    def _is_connection_error(self, err):
        """ Errors when we're fairly sure that the server did not receive the
        """Errors when we're fairly sure that the server did not receive the
        request, so it should be safe to retry.
        """
        if isinstance(err, ProxyError):
            err = err.original_error
        return isinstance(err, ConnectTimeoutError)

    def _is_read_error(self, err):
        """ Errors that occur after the request has been started, so we should
        """Errors that occur after the request has been started, so we should
        assume that the server began processing it.
        """
        return isinstance(err, (ReadTimeoutError, ProtocolError))

    def _is_method_retryable(self, method):
        """ Checks if a given HTTP method should be retried upon, depending if
        it is included on the method whitelist.
        """Checks if a given HTTP method should be retried upon, depending if
        it is included in the allowed_methods
        """
        if self.method_whitelist and method.upper() not in self.method_whitelist:
            return False
        # TODO: For now favor if the Retry implementation sets its own method_whitelist
        # property outside of our constructor to avoid breaking custom implementations.
        if "method_whitelist" in self.__dict__:
            warnings.warn(
                "Using 'method_whitelist' with Retry is deprecated and "
                "will be removed in v2.0. Use 'allowed_methods' instead",
                DeprecationWarning,
            )
            allowed_methods = self.method_whitelist
        else:
            allowed_methods = self.allowed_methods

        if allowed_methods and method.upper() not in allowed_methods:
            return False
        return True

    def is_retry(self, method, status_code, has_retry_after=False):
        """ Is this method/status code retryable? (Based on whitelists and control
        """Is this method/status code retryable? (Based on allowlists and control
        variables such as the number of total retries to allow, whether to
        respect the Retry-After header, whether this header is present, and
        whether the returned status code is on the list of status codes to
@@ -344,8 +486,15 @@ class Retry(object):
        )

    def is_exhausted(self):
        """ Are we out of retries? """
        retry_counts = (self.total, self.connect, self.read, self.redirect, self.status)
        """Are we out of retries?"""
        retry_counts = (
            self.total,
            self.connect,
            self.read,
            self.redirect,
            self.status,
            self.other,
        )
        retry_counts = list(filter(None, retry_counts))
        if not retry_counts:
            return False
@@ -361,7 +510,7 @@ class Retry(object):
        _pool=None,
        _stacktrace=None,
    ):
        """ Return a new Retry object with incremented retry counters.
        """Return a new Retry object with incremented retry counters.

        :param response: A response object, or None, if the server did not
            return a response.
@@ -383,6 +532,7 @@ class Retry(object):
        read = self.read
        redirect = self.redirect
        status_count = self.status
        other = self.other
        cause = "unknown"
        status = None
        redirect_location = None
@@ -401,6 +551,11 @@ class Retry(object):
        elif read is not None:
            read -= 1

        elif error:
            # Other retry?
            if other is not None:
                other -= 1

        elif response and response.get_redirect_location():
            # Redirect retry?
            if redirect is not None:
@@ -411,7 +566,7 @@ class Retry(object):

        else:
            # Incrementing because of a server error like a 500 in
            # status_forcelist and a the given method is in the whitelist
            # status_forcelist and the given method is in the allowed_methods
            cause = ResponseError.GENERIC_ERROR
            if response and response.status:
                if status_count is not None:
@@ -429,6 +584,7 @@ class Retry(object):
            read=read,
            redirect=redirect,
            status=status_count,
            other=other,
            history=history,
        )

@@ -445,6 +601,20 @@ class Retry(object):
            "read={self.read}, redirect={self.redirect}, status={self.status})"
        ).format(cls=type(self), self=self)

    def __getattr__(self, item):
        if item == "method_whitelist":
            # TODO: Remove this deprecated alias in v2.0
            warnings.warn(
                "Using 'method_whitelist' with Retry is deprecated and "
                "will be removed in v2.0. Use 'allowed_methods' instead",
                DeprecationWarning,
            )
            return self.allowed_methods
        try:
            return getattr(super(Retry, self), item)
        except AttributeError:
            return getattr(Retry, item)


# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
@@ -1,21 +1,27 @@
from __future__ import absolute_import
import errno
import warnings
import hmac
import sys

import hmac
import os
import sys
import warnings
from binascii import hexlify, unhexlify
from hashlib import md5, sha1, sha256

from .url import IPV4_RE, BRACELESS_IPV6_ADDRZ_RE
from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
from ..exceptions import (
    InsecurePlatformWarning,
    ProxySchemeUnsupported,
    SNIMissingWarning,
    SSLError,
)
from ..packages import six

from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE

SSLContext = None
SSLTransport = None
HAS_SNI = False
IS_PYOPENSSL = False
IS_SECURETRANSPORT = False
ALPN_PROTOCOLS = ["http/1.1"]

# Maps the length of a digest to a possible hash function producing this digest
HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
@@ -29,8 +35,8 @@ def _const_compare_digest_backport(a, b):
    Returns True if the digests match, and False otherwise.
    """
    result = abs(len(a) - len(b))
    for l, r in zip(bytearray(a), bytearray(b)):
        result |= l ^ r
    for left, right in zip(bytearray(a), bytearray(b)):
        result |= left ^ right
    return result == 0


@@ -38,11 +44,21 @@ _const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_ba

try:  # Test for SSL features
    import ssl
    from ssl import wrap_socket, CERT_REQUIRED
    from ssl import CERT_REQUIRED, wrap_socket
except ImportError:
    pass

try:
    from ssl import HAS_SNI  # Has SNI?
except ImportError:
    pass

try:
    from .ssltransport import SSLTransport
except ImportError:
    pass


try:  # Platform-specific: Python 3.6
    from ssl import PROTOCOL_TLS

@@ -55,14 +71,25 @@ except ImportError:
except ImportError:
    PROTOCOL_SSLv23 = PROTOCOL_TLS = 2

try:
    from ssl import PROTOCOL_TLS_CLIENT
except ImportError:
    PROTOCOL_TLS_CLIENT = PROTOCOL_TLS


try:
    from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
    from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
except ImportError:
    OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
    OP_NO_COMPRESSION = 0x20000


try:  # OP_NO_TICKET was added in Python 3.6
    from ssl import OP_NO_TICKET
except ImportError:
    OP_NO_TICKET = 0x4000


# A secure default.
# Sources for more information on TLS ciphers:
#
@@ -119,12 +146,15 @@ except ImportError:
            self.certfile = certfile
            self.keyfile = keyfile

        def load_verify_locations(self, cafile=None, capath=None):
        def load_verify_locations(self, cafile=None, capath=None, cadata=None):
            self.ca_certs = cafile

            if capath is not None:
                raise SSLError("CA directories not supported in older Pythons")

            if cadata is not None:
                raise SSLError("CA data not supported in older Pythons")

        def set_ciphers(self, cipher_suite):
            self.ciphers = cipher_suite

@@ -134,7 +164,7 @@ except ImportError:
                "urllib3 from configuring SSL appropriately and may cause "
                "certain SSL connections to fail. You can upgrade to a newer "
                "version of Python to solve this. For more information, see "
                "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
                "#ssl-warnings",
                InsecurePlatformWarning,
            )
@@ -182,7 +212,7 @@ def resolve_cert_reqs(candidate):
    """
    Resolves the argument to a numeric constant, which can be passed to
    the wrap_socket function/method from the ssl module.
    Defaults to :data:`ssl.CERT_NONE`.
    Defaults to :data:`ssl.CERT_REQUIRED`.
    If given a string it is assumed to be the name of the constant in the
    :mod:`ssl` module or its abbreviation.
    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.
@@ -246,14 +276,18 @@ def create_urllib3_context(
        ``ssl.CERT_REQUIRED``.
    :param options:
        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
    :param ciphers:
        Which cipher suites to allow the server to select.
    :returns:
        Constructed SSLContext object with specified options
    :rtype: SSLContext
    """
    context = SSLContext(ssl_version or PROTOCOL_TLS)
    # PROTOCOL_TLS is deprecated in Python 3.10
    if not ssl_version or ssl_version == PROTOCOL_TLS:
        ssl_version = PROTOCOL_TLS_CLIENT

    context = SSLContext(ssl_version)

    context.set_ciphers(ciphers or DEFAULT_CIPHERS)

@@ -269,6 +303,11 @@ def create_urllib3_context(
    # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
    # (issue #309)
    options |= OP_NO_COMPRESSION
    # TLSv1.2 only. Unless set explicitly, do not request tickets.
    # This may save some bandwidth on wire, and although the ticket is encrypted,
    # there is a risk associated with it being on wire,
    # if the server is not rotating its ticketing keys properly.
    options |= OP_NO_TICKET

    context.options |= options

@@ -283,13 +322,33 @@ def create_urllib3_context(
    ) is not None:
        context.post_handshake_auth = True

    context.verify_mode = cert_reqs
    if (
        getattr(context, "check_hostname", None) is not None
    ):  # Platform-specific: Python 3.2
        # We do our own verification, including fingerprints and alternative
        # hostnames. So disable it here
        context.check_hostname = False
    def disable_check_hostname():
        if (
            getattr(context, "check_hostname", None) is not None
        ):  # Platform-specific: Python 3.2
            # We do our own verification, including fingerprints and alternative
            # hostnames. So disable it here
            context.check_hostname = False

    # The order of the below lines setting verify_mode and check_hostname
    # matters due to safe-guards SSLContext has to prevent an SSLContext with
    # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more
    # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used
    # or not so we don't know the initial state of the freshly created SSLContext.
    if cert_reqs == ssl.CERT_REQUIRED:
        context.verify_mode = cert_reqs
        disable_check_hostname()
    else:
        disable_check_hostname()
        context.verify_mode = cert_reqs

    # Enable logging of TLS session keys via defacto standard environment variable
    # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
    if hasattr(context, "keylog_filename"):
        sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
        if sslkeylogfile:
            context.keylog_filename = sslkeylogfile

    return context
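A sketch of the resulting defaults; OP_NO_TICKET and the keylog hook are only observable on Pythons that expose them (3.6+ and 3.8+ respectively):

    import ssl

    from urllib3.util.ssl_ import create_urllib3_context

    ctx = create_urllib3_context(cert_reqs=ssl.CERT_REQUIRED)
    ctx.load_default_certs()
    # TLS 1.2 session tickets are now refused by default:
    assert ctx.options & ssl.OP_NO_TICKET
    # urllib3 does its own hostname verification:
    assert ctx.check_hostname is False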
@@ -305,6 +364,8 @@ def ssl_wrap_socket(
    ssl_context=None,
    ca_cert_dir=None,
    key_password=None,
    ca_cert_data=None,
    tls_in_tls=False,
):
    """
    All arguments except for server_hostname, ssl_context, and ca_cert_dir have
@@ -323,6 +384,11 @@ def ssl_wrap_socket(
        SSLContext.load_verify_locations().
    :param key_password:
        Optional password if the keyfile is encrypted.
    :param ca_cert_data:
        Optional string containing CA certificates in PEM format suitable for
        passing as the cadata parameter to SSLContext.load_verify_locations()
    :param tls_in_tls:
        Use SSLTransport to wrap the existing socket.
    """
    context = ssl_context
    if context is None:
@@ -331,17 +397,11 @@ def ssl_wrap_socket(
        # this code.
        context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)

    if ca_certs or ca_cert_dir:
    if ca_certs or ca_cert_dir or ca_cert_data:
        try:
            context.load_verify_locations(ca_certs, ca_cert_dir)
        except IOError as e:  # Platform-specific: Python 2.7
            context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
        except (IOError, OSError) as e:
            raise SSLError(e)
        # Py33 raises FileNotFoundError which subclasses OSError
        # These are not equivalent unless we check the errno attribute
        except OSError as e:  # Platform-specific: Python 3.3 and beyond
            if e.errno == errno.ENOENT:
                raise SSLError(e)
            raise

    elif ssl_context is None and hasattr(context, "load_default_certs"):
        # try to load OS default certs; works well on Windows (require Python3.4+)
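The new ca_cert_data parameter feeds in-memory PEM text straight through to SSLContext.load_verify_locations(cadata=...); a hedged sketch (the bundle path is hypothetical):

    import socket

    from urllib3.util.ssl_ import ssl_wrap_socket

    with open("ca-bundle.pem") as f:  # hypothetical CA bundle on disk
        pem_data = f.read()

    raw = socket.create_connection(("example.com", 443))
    tls = ssl_wrap_socket(
        raw,
        ca_cert_data=pem_data,
        server_hostname="example.com",
    )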
@@ -359,28 +419,39 @@ def ssl_wrap_socket(
    else:
        context.load_cert_chain(certfile, keyfile, key_password)

    try:
    if hasattr(context, "set_alpn_protocols"):
        context.set_alpn_protocols(ALPN_PROTOCOLS)
    except NotImplementedError:  # Defensive: in CI, we always have set_alpn_protocols
        pass

    # If we detect server_hostname is an IP address then the SNI
    # extension should not be used according to RFC3546 Section 3.1
    # We shouldn't warn the user if SNI isn't available but we would
    # not be using SNI anyways due to IP address for server_hostname.
    if (
        server_hostname is not None and not is_ipaddress(server_hostname)
    ) or IS_SECURETRANSPORT:
        if HAS_SNI and server_hostname is not None:
            return context.wrap_socket(sock, server_hostname=server_hostname)

    use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
    # SecureTransport uses server_hostname in certificate verification.
    send_sni = (use_sni_hostname and HAS_SNI) or (
        IS_SECURETRANSPORT and server_hostname
    )
    # Do not warn the user if server_hostname is an invalid SNI hostname.
    if not HAS_SNI and use_sni_hostname:
        warnings.warn(
            "An HTTPS request has been made, but the SNI (Server Name "
            "Indication) extension to TLS is not available on this platform. "
            "This may cause the server to present an incorrect TLS "
            "certificate, which can cause validation failures. You can upgrade to "
            "a newer version of Python to solve this. For more information, see "
            "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
            "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
            "#ssl-warnings",
            SNIMissingWarning,
        )

    return context.wrap_socket(sock)
    if send_sni:
        ssl_sock = _ssl_wrap_socket_impl(
            sock, context, tls_in_tls, server_hostname=server_hostname
        )
    else:
        ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
    return ssl_sock


def is_ipaddress(hostname):
@@ -405,3 +476,20 @@ def _is_key_file_encrypted(key_file):
            return True

    return False


def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
    if tls_in_tls:
        if not SSLTransport:
            # Import error, ssl is not available.
            raise ProxySchemeUnsupported(
                "TLS in TLS requires support for the 'ssl' module"
            )

        SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
        return SSLTransport(sock, ssl_context, server_hostname)

    if server_hostname:
        return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
    else:
        return ssl_context.wrap_socket(sock)
@@ -9,7 +9,7 @@ import sys
# ipaddress has been backported to 2.6+ in pypi. If it is installed on the
# system, use it to handle IPAddress ServerAltnames (this was added in
# python-3.5) otherwise only do DNS matching. This allows
# backports.ssl_match_hostname to continue to be used in Python 2.7.
# util.ssl_match_hostname to continue to be used in Python 2.7.
try:
    import ipaddress
except ImportError:
@@ -78,7 +78,8 @@ def _dnsname_match(dn, hostname, max_wildcards=1):

def _to_unicode(obj):
    if isinstance(obj, str) and sys.version_info < (3,):
        obj = unicode(obj, encoding="ascii", errors="strict")
        # ignored flake8 # F821 to support python 2.7 function
        obj = unicode(obj, encoding="ascii", errors="strict")  # noqa: F821
    return obj


@@ -111,11 +112,9 @@ def match_hostname(cert, hostname):
    try:
        # Divergence from upstream: ipaddress can't handle byte str
        host_ip = ipaddress.ip_address(_to_unicode(hostname))
    except ValueError:
        # Not an IP address (common case)
        host_ip = None
    except UnicodeError:
        # Divergence from upstream: Have to deal with ipaddress not taking
    except (UnicodeError, ValueError):
        # ValueError: Not an IP address (common case)
        # UnicodeError: Divergence from upstream: Have to deal with ipaddress not taking
        # byte strings. addresses should be all ascii, so we consider it not
        # an ipaddress in this case
        host_ip = None
@@ -123,7 +122,7 @@ def match_hostname(cert, hostname):
        # Divergence from upstream: Make ipaddress library optional
        if ipaddress is None:
            host_ip = None
        else:
        else:  # Defensive
            raise
    dnsnames = []
    san = cert.get("subjectAltName", ())
@@ -153,10 +152,8 @@ def match_hostname(cert, hostname):
            "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames)))
        )
    elif len(dnsnames) == 1:
        raise CertificateError(
            "hostname %r " "doesn't match %r" % (hostname, dnsnames[0])
        )
        raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
    else:
        raise CertificateError(
            "no appropriate commonName or " "subjectAltName fields were found"
            "no appropriate commonName or subjectAltName fields were found"
        )
lib/urllib3/util/ssltransport.py (new file, 221 lines)
@@ -0,0 +1,221 @@
import io
import socket
import ssl

from ..exceptions import ProxySchemeUnsupported
from ..packages import six

SSL_BLOCKSIZE = 16384


class SSLTransport:
    """
    The SSLTransport wraps an existing socket and establishes an SSL connection.

    Contrary to Python's implementation of SSLSocket, it allows you to chain
    multiple TLS connections together. It's particularly useful if you need to
    implement TLS within TLS.

    The class supports most of the socket API operations.
    """

    @staticmethod
    def _validate_ssl_context_for_tls_in_tls(ssl_context):
        """
        Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
        for TLS in TLS.

        The only requirement is that the ssl_context provides the 'wrap_bio'
        methods.
        """

        if not hasattr(ssl_context, "wrap_bio"):
            if six.PY2:
                raise ProxySchemeUnsupported(
                    "TLS in TLS requires SSLContext.wrap_bio() which isn't "
                    "supported on Python 2"
                )
            else:
                raise ProxySchemeUnsupported(
                    "TLS in TLS requires SSLContext.wrap_bio() which isn't "
                    "available on non-native SSLContext"
                )

    def __init__(
        self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
    ):
        """
        Create an SSLTransport around socket using the provided ssl_context.
        """
        self.incoming = ssl.MemoryBIO()
        self.outgoing = ssl.MemoryBIO()

        self.suppress_ragged_eofs = suppress_ragged_eofs
        self.socket = socket

        self.sslobj = ssl_context.wrap_bio(
            self.incoming, self.outgoing, server_hostname=server_hostname
        )

        # Perform initial handshake.
        self._ssl_io_loop(self.sslobj.do_handshake)

    def __enter__(self):
        return self

    def __exit__(self, *_):
        self.close()

    def fileno(self):
        return self.socket.fileno()

    def read(self, len=1024, buffer=None):
        return self._wrap_ssl_read(len, buffer)

    def recv(self, len=1024, flags=0):
        if flags != 0:
            raise ValueError("non-zero flags not allowed in calls to recv")
        return self._wrap_ssl_read(len)

    def recv_into(self, buffer, nbytes=None, flags=0):
        if flags != 0:
            raise ValueError("non-zero flags not allowed in calls to recv_into")
        if buffer and (nbytes is None):
            nbytes = len(buffer)
        elif nbytes is None:
            nbytes = 1024
        return self.read(nbytes, buffer)

    def sendall(self, data, flags=0):
        if flags != 0:
            raise ValueError("non-zero flags not allowed in calls to sendall")
        count = 0
        with memoryview(data) as view, view.cast("B") as byte_view:
            amount = len(byte_view)
            while count < amount:
                v = self.send(byte_view[count:])
                count += v

    def send(self, data, flags=0):
        if flags != 0:
            raise ValueError("non-zero flags not allowed in calls to send")
        response = self._ssl_io_loop(self.sslobj.write, data)
        return response

    def makefile(
        self, mode="r", buffering=None, encoding=None, errors=None, newline=None
    ):
        """
        Python's httpclient uses makefile and buffered io when reading HTTP
        messages and we need to support it.

        This is unfortunately a copy and paste of socket.py makefile with small
        changes to point to the socket directly.
        """
        if not set(mode) <= {"r", "w", "b"}:
            raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))

        writing = "w" in mode
        reading = "r" in mode or not writing
        assert reading or writing
        binary = "b" in mode
        rawmode = ""
        if reading:
            rawmode += "r"
        if writing:
            rawmode += "w"
        raw = socket.SocketIO(self, rawmode)
        self.socket._io_refs += 1
        if buffering is None:
            buffering = -1
        if buffering < 0:
            buffering = io.DEFAULT_BUFFER_SIZE
        if buffering == 0:
            if not binary:
                raise ValueError("unbuffered streams must be binary")
            return raw
        if reading and writing:
            buffer = io.BufferedRWPair(raw, raw, buffering)
        elif reading:
            buffer = io.BufferedReader(raw, buffering)
        else:
            assert writing
            buffer = io.BufferedWriter(raw, buffering)
        if binary:
            return buffer
        text = io.TextIOWrapper(buffer, encoding, errors, newline)
        text.mode = mode
        return text

    def unwrap(self):
        self._ssl_io_loop(self.sslobj.unwrap)

    def close(self):
        self.socket.close()

    def getpeercert(self, binary_form=False):
        return self.sslobj.getpeercert(binary_form)

    def version(self):
        return self.sslobj.version()

    def cipher(self):
        return self.sslobj.cipher()

    def selected_alpn_protocol(self):
        return self.sslobj.selected_alpn_protocol()

    def selected_npn_protocol(self):
        return self.sslobj.selected_npn_protocol()

    def shared_ciphers(self):
        return self.sslobj.shared_ciphers()

    def compression(self):
        return self.sslobj.compression()

    def settimeout(self, value):
        self.socket.settimeout(value)

    def gettimeout(self):
        return self.socket.gettimeout()

    def _decref_socketios(self):
        self.socket._decref_socketios()

    def _wrap_ssl_read(self, len, buffer=None):
        try:
            return self._ssl_io_loop(self.sslobj.read, len, buffer)
        except ssl.SSLError as e:
            if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
                return 0  # eof, return 0.
            else:
                raise

    def _ssl_io_loop(self, func, *args):
        """Performs an I/O loop between incoming/outgoing and the socket."""
        should_loop = True
        ret = None

        while should_loop:
            errno = None
            try:
                ret = func(*args)
            except ssl.SSLError as e:
                if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
                    # WANT_READ, and WANT_WRITE are expected, others are not.
                    raise e
                errno = e.errno

            buf = self.outgoing.read()
            self.socket.sendall(buf)

            if errno is None:
                should_loop = False
            elif errno == ssl.SSL_ERROR_WANT_READ:
                buf = self.socket.recv(SSL_BLOCKSIZE)
                if buf:
                    self.incoming.write(buf)
                else:
                    self.incoming.write_eof()
        return ret
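Because the handshake is driven through MemoryBIO pairs rather than the socket itself, the wrapped object can be wrapped a second time, which is what makes HTTPS-proxy-to-HTTPS-origin (TLS in TLS) possible. A minimal standalone sketch against a reachable host:

    import socket
    import ssl

    from urllib3.util.ssltransport import SSLTransport

    ctx = ssl.create_default_context()
    raw = socket.create_connection(("example.com", 443))
    # Handshake happens in the constructor via the _ssl_io_loop above.
    with SSLTransport(raw, ctx, server_hostname="example.com") as tls:
        tls.sendall(b"GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
        head = tls.recv(1024)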
@@ -1,10 +1,10 @@
from __future__ import absolute_import

# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
import time

# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT, getdefaulttimeout

from ..exceptions import TimeoutStateError

# A sentinel value to indicate that no timeout was specified by the user in
@@ -17,22 +17,28 @@ current_time = getattr(time, "monotonic", time.time)


class Timeout(object):
    """ Timeout configuration.
    """Timeout configuration.

    Timeouts can be defined as a default for a pool::
    Timeouts can be defined as a default for a pool:

        timeout = Timeout(connect=2.0, read=7.0)
        http = PoolManager(timeout=timeout)
        response = http.request('GET', 'http://example.com/')
    .. code-block:: python

    Or per-request (which overrides the default for the pool)::
       timeout = Timeout(connect=2.0, read=7.0)
       http = PoolManager(timeout=timeout)
       response = http.request('GET', 'http://example.com/')

        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
    Or per-request (which overrides the default for the pool):

    Timeouts can be disabled by setting all the parameters to ``None``::
    .. code-block:: python

        no_timeout = Timeout(connect=None, read=None)
        response = http.request('GET', 'http://example.com/', timeout=no_timeout)
       response = http.request('GET', 'http://example.com/', timeout=Timeout(10))

    Timeouts can be disabled by setting all the parameters to ``None``:

    .. code-block:: python

       no_timeout = Timeout(connect=None, read=None)
       response = http.request('GET', 'http://example.com/', timeout=no_timeout)


    :param total:
@@ -43,7 +49,7 @@ class Timeout(object):

        Defaults to None.

    :type total: integer, float, or None
    :type total: int, float, or None

    :param connect:
        The maximum amount of time (in seconds) to wait for a connection
@@ -53,7 +59,7 @@ class Timeout(object):
        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
        None will set an infinite timeout for connection attempts.

    :type connect: integer, float, or None
    :type connect: int, float, or None

    :param read:
        The maximum amount of time (in seconds) to wait between consecutive
@@ -63,7 +69,7 @@ class Timeout(object):
        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
        None will set an infinite timeout.

    :type read: integer, float, or None
    :type read: int, float, or None

    .. note::

@@ -98,7 +104,7 @@ class Timeout(object):
        self.total = self._validate_timeout(total, "total")
        self._start_connect = None

    def __str__(self):
    def __repr__(self):
        return "%s(connect=%r, read=%r, total=%r)" % (
            type(self).__name__,
            self._connect,
@@ -106,9 +112,16 @@ class Timeout(object):
            self.total,
        )

    # __str__ provided for backwards compatibility
    __str__ = __repr__

    @classmethod
    def resolve_default_timeout(cls, timeout):
        return getdefaulttimeout() if timeout is cls.DEFAULT_TIMEOUT else timeout

    @classmethod
    def _validate_timeout(cls, value, name):
        """ Check that a timeout attribute is valid.
        """Check that a timeout attribute is valid.

        :param value: The timeout value to validate
        :param name: The name of the timeout attribute to validate. This is
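resolve_default_timeout() exists so pool code can turn the sentinel into the interpreter-wide default lazily; a quick sketch:

    import socket

    from urllib3.util.timeout import Timeout

    socket.setdefaulttimeout(5.0)
    # DEFAULT_TIMEOUT is a sentinel; resolve it to the real global default:
    Timeout.resolve_default_timeout(Timeout.DEFAULT_TIMEOUT)  # 5.0
    Timeout.resolve_default_timeout(2.5)                      # 2.5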
@@ -154,7 +167,7 @@ class Timeout(object):

    @classmethod
    def from_float(cls, timeout):
        """ Create a new Timeout from a legacy timeout value.
        """Create a new Timeout from a legacy timeout value.

        The timeout value used by httplib.py sets the same timeout on the
        connect(), and recv() socket requests. This creates a :class:`Timeout`
@@ -169,7 +182,7 @@ class Timeout(object):
        return Timeout(read=timeout, connect=timeout)

    def clone(self):
        """ Create a copy of the timeout object
        """Create a copy of the timeout object

        Timeout properties are stored per-pool but each request needs a fresh
        Timeout object to ensure each one has its own start/stop configured.
@@ -183,7 +196,7 @@ class Timeout(object):
        return Timeout(connect=self._connect, read=self._read, total=self.total)

    def start_connect(self):
        """ Start the timeout clock, used during a connect() attempt
        """Start the timeout clock, used during a connect() attempt

        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to start a timer that has been started already.
@@ -194,7 +207,7 @@ class Timeout(object):
        return self._start_connect

    def get_connect_duration(self):
        """ Gets the time elapsed since the call to :meth:`start_connect`.
        """Gets the time elapsed since the call to :meth:`start_connect`.

        :return: Elapsed time in seconds.
        :rtype: float
@@ -203,13 +216,13 @@ class Timeout(object):
        """
        if self._start_connect is None:
            raise TimeoutStateError(
                "Can't get connect duration for timer " "that has not started."
                "Can't get connect duration for timer that has not started."
            )
        return current_time() - self._start_connect

    @property
    def connect_timeout(self):
        """ Get the value to use when setting a connection timeout.
        """Get the value to use when setting a connection timeout.

        This will be a positive float or integer, the value None
        (never timeout), or the default system timeout.
@@ -227,7 +240,7 @@ class Timeout(object):

    @property
    def read_timeout(self):
        """ Get the value for the read timeout.
        """Get the value for the read timeout.

        This assumes some time has elapsed in the connection timeout and
        computes the read timeout appropriately.

@@ -1,11 +1,11 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
from collections import namedtuple
|
||||
|
||||
from ..exceptions import LocationParseError
|
||||
from ..packages import six
|
||||
|
||||
|
||||
url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]
|
||||
|
||||
# We only want to normalize urls with an HTTP(S) scheme.
|
||||
@@ -18,7 +18,7 @@ PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}")
|
||||
SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)")
|
||||
URI_RE = re.compile(
|
||||
r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?"
|
||||
r"(?://([^/?#]*))?"
|
||||
r"(?://([^\\/?#]*))?"
|
||||
r"([^?#]*)"
|
||||
r"(?:\?([^#]*))?"
|
||||
r"(?:#(.*))?$",
|
||||
@@ -50,12 +50,12 @@ _variations = [
    "(?:(?:%(hex)s:){0,6}%(hex)s)?::",
]

UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~"
IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*"
TARGET_RE = re.compile(r"^(/[^?]*)(?:\?([^#]+))?(?:#(.*))?$")
TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")

IPV4_RE = re.compile("^" + IPV4_PAT + "$")
IPV6_RE = re.compile("^" + IPV6_PAT + "$")
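The new TARGET_RE also stops the path at '#', accepts an empty query string ([^#]* instead of [^#]+), and no longer captures the fragment. With the new pattern:

    >>> import re
    >>> TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")
    >>> TARGET_RE.match("/path?q=1#frag").groups()
    ('/path', 'q=1')
    >>> TARGET_RE.match("/path#frag").groups()
    ('/path', None)

Under the old pattern, [^?]* let the '#' and everything after it stay inside the path group.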
@@ -63,12 +63,12 @@ IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")
BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")

SUBAUTHORITY_PAT = (u"^(?:(.*)@)?(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
_HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*?(|0|[1-9][0-9]{0,4}))?$") % (
    REG_NAME_PAT,
    IPV4_PAT,
    IPV6_ADDRZ_PAT,
)
SUBAUTHORITY_RE = re.compile(SUBAUTHORITY_PAT, re.UNICODE | re.DOTALL)
_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL)

UNRESERVED_CHARS = set(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
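_HOST_PORT_PAT drops the old userinfo group (parse_url now splits that off with rpartition, see its hunk below) and tightens the port: the lazy 0*? strips leading zeros, the alternation caps the port at five significant digits, and the empty alternative still tolerates a bare ':' (handled by the `if port == "":` branch further down). Exercising just the port half of the pattern in isolation:

    >>> import re
    >>> port_re = re.compile(r"^(?::0*?(|0|[1-9][0-9]{0,4}))?$")
    >>> port_re.match(":0080").group(1)
    '80'
    >>> port_re.match(":0").group(1)
    '0'
    >>> port_re.match(":123456") is None
    True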
@@ -216,18 +216,15 @@ def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"):

    component = six.ensure_text(component)

    # Normalize existing percent-encoded bytes.
    # Try to see if the component we're encoding is already percent-encoded
    # so we can skip all '%' characters but still encode all others.
    percent_encodings = PERCENT_RE.findall(component)

    # Normalize existing percent-encoded bytes.
    for enc in percent_encodings:
        if not enc.isupper():
            component = component.replace(enc, enc.upper())
    component, percent_encodings = PERCENT_RE.subn(
        lambda match: match.group(0).upper(), component
    )

    uri_bytes = component.encode("utf-8", "surrogatepass")
    is_percent_encoded = len(percent_encodings) == uri_bytes.count(b"%")
    is_percent_encoded = percent_encodings == uri_bytes.count(b"%")

    encoded_component = bytearray()

    for i in range(0, len(uri_bytes)):
@@ -237,7 +234,7 @@ def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"):
        if (is_percent_encoded and byte == b"%") or (
            byte_ord < 128 and byte.decode() in allowed_chars
        ):
            encoded_component.extend(byte)
            encoded_component += byte
            continue
        encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
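The rewritten normalization is a single pass: PERCENT_RE.subn uppercases every percent-escape and returns how many it saw, replacing the findall-then-str.replace loop. Because subn's count equals the number of escapes, comparing it (rather than len(findall(...))) against uri_bytes.count(b"%") still detects a fully percent-encoded component. For example:

    >>> import re
    >>> PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}")
    >>> PERCENT_RE.subn(lambda match: match.group(0).upper(), "%3a%3A%af")
    ('%3A%3A%AF', 3)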
@@ -282,6 +279,9 @@ def _normalize_host(host, scheme):
    if scheme in NORMALIZABLE_SCHEMES:
        is_ipv6 = IPV6_ADDRZ_RE.match(host)
        if is_ipv6:
            # IPv6 hosts of the form 'a::b%zone' are encoded in a URL as
            # such per RFC 6874: 'a::b%25zone'. Unquote the ZoneID
            # separator as necessary to return a valid RFC 4007 scoped IP.
            match = ZONE_ID_RE.search(host)
            if match:
                start, end = match.span(1)
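The added comment pins down the two RFCs in play: inside a URL the zone-ID separator must be percent-encoded as '%25' (RFC 6874), while the scoped address handed to the OS wants a literal '%' (RFC 4007). Given the ZONE_ID_RE defined earlier:

    >>> ZONE_ID_RE.search("[fe80::1%25eth0]").group(1)
    '%25eth0'

so _normalize_host can rewrite '[fe80::1%25eth0]' to the usable '[fe80::1%eth0]'.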
@@ -303,7 +303,7 @@ def _normalize_host(host, scheme):


def _idna_encode(name):
    if name and any([ord(x) > 128 for x in name]):
    if name and any(ord(x) >= 128 for x in name):
        try:
            import idna
        except ImportError:
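This is two fixes in one line: >= makes U+0080, the first non-ASCII code point, take the IDNA path too, and the throwaway list inside any() becomes a lazy generator. The boundary case:

    >>> name = u"caf\x80"                    # ord == 128, already non-ASCII
    >>> any([ord(x) > 128 for x in name])    # old check misses it
    False
    >>> any(ord(x) >= 128 for x in name)     # new check catches it
    True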
@@ -322,17 +322,11 @@

def _encode_target(target):
    """Percent-encodes a request target so that there are no invalid characters"""
    if not target.startswith("/"):
        return target

    path, query, fragment = TARGET_RE.match(target).groups()
    path, query = TARGET_RE.match(target).groups()
    target = _encode_invalid_chars(path, PATH_CHARS)
    query = _encode_invalid_chars(query, QUERY_CHARS)
    fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS)
    if query is not None:
        target += "?" + query
    if fragment is not None:
        target += "#" + target
    return target
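The removed branch contained a genuine bug: it appended "#" plus the already-rebuilt target instead of the encoded fragment. Reproducing the old flow with pass-through encoding makes it visible:

    path, query = "/p", "q=1"
    target = path + "?" + query   # '/p?q=1'
    target += "#" + target        # bug: '/p?q=1#/p?q=1', not '/p?q=1#x'

The new version sidesteps the issue entirely: a request target sent on the wire never carries a fragment, so the tightened TARGET_RE just discards it and _encode_target("/p?q=1#x") returns '/p?q=1'.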
@@ -340,7 +334,7 @@ def parse_url(url):
    """
    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
    performed to parse incomplete urls. Fields not provided will be None.
    This parser is RFC 3986 compliant.
    This parser is RFC 3986 and RFC 6874 compliant.

    The parser logic and helper functions are based heavily on
    work done in the ``rfc3986`` module.
@@ -374,7 +368,9 @@ def parse_url(url):
        scheme = scheme.lower()

    if authority:
        auth, host, port = SUBAUTHORITY_RE.match(authority).groups()
        auth, _, host_port = authority.rpartition("@")
        auth = auth or None
        host, port = _HOST_PORT_RE.match(host_port).groups()
        if auth and normalize_uri:
            auth = _encode_invalid_chars(auth, USERINFO_CHARS)
        if port == "":
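Splitting the authority with rpartition("@") means the last '@' is the userinfo/host boundary, so an un-encoded '@' in the userinfo can no longer shift the host match, and _HOST_PORT_RE only ever sees the host:port part. End to end, assuming urllib3 1.26 is importable:

    >>> from urllib3.util.url import parse_url
    >>> parse_url("http://user@example.com/path?q=1#frag")
    Url(scheme='http', auth='user', host='example.com', port=None, path='/path', query='q=1', fragment='frag')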
@@ -1,7 +1,7 @@
import errno
from functools import partial
import select
import sys
from functools import partial

try:
    from time import monotonic
@@ -42,7 +42,6 @@ if sys.version_info >= (3, 5):
    def _retry_on_intr(fn, timeout):
        return fn(timeout)


else:
    # Old and broken Pythons.
    def _retry_on_intr(fn, timeout):
@@ -140,14 +139,14 @@ def wait_for_socket(*args, **kwargs):


def wait_for_read(sock, timeout=None):
    """ Waits for reading to be available on a given socket.
    """Waits for reading to be available on a given socket.
    Returns True if the socket is readable, or False if the timeout expired.
    """
    return wait_for_socket(sock, read=True, timeout=timeout)


def wait_for_write(sock, timeout=None):
    """ Waits for writing to be available on a given socket.
    """Waits for writing to be available on a given socket.
    Returns True if the socket is writable, or False if the timeout expired.
    """
    return wait_for_socket(sock, write=True, timeout=timeout)
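Both helpers are thin wrappers that pass the corresponding flag to wait_for_socket. A self-contained check with a local socket pair (assumes Python 3.5+, where socket.socketpair is available on all platforms):

    >>> import socket
    >>> from urllib3.util.wait import wait_for_read, wait_for_write
    >>> a, b = socket.socketpair()
    >>> wait_for_read(a, timeout=0.1)    # nothing buffered yet
    False
    >>> _ = b.sendall(b"x")
    >>> wait_for_read(a, timeout=0.1)    # one byte is waiting
    True
    >>> wait_for_write(a, timeout=0.1)   # send buffer has room
    True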