Upgrade to Requests 2.32.0
nateprewitt committed May 20, 2024
1 parent 612515d commit 1dabdf2
Showing 17 changed files with 268 additions and 122 deletions.
1 change: 1 addition & 0 deletions news/requests.vendor.rst
@@ -0,0 +1 @@
Upgrade Requests to 2.32.0
9 changes: 3 additions & 6 deletions src/pip/_vendor/requests/__init__.py
@@ -44,11 +44,7 @@

from .exceptions import RequestsDependencyWarning

try:
from pip._vendor.charset_normalizer import __version__ as charset_normalizer_version
except ImportError:
charset_normalizer_version = None

charset_normalizer_version = None
chardet_version = None


@@ -80,7 +76,8 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver
# charset_normalizer >= 2.0.0 < 4.0.0
assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0)
else:
raise Exception("You need either charset_normalizer or chardet installed")
# pip does not need or use character detection
pass


def _check_cryptography(cryptography_version):
6 changes: 3 additions & 3 deletions src/pip/_vendor/requests/__version__.py
@@ -5,10 +5,10 @@
__title__ = "requests"
__description__ = "Python HTTP for Humans."
__url__ = "https://requests.readthedocs.io"
__version__ = "2.31.0"
__build__ = 0x023100
__version__ = "2.32.0"
__build__ = 0x023200
__author__ = "Kenneth Reitz"
__author_email__ = "[email protected]"
__license__ = "Apache 2.0"
__license__ = "Apache-2.0"
__copyright__ = "Copyright Kenneth Reitz"
__cake__ = "\u2728 \U0001f370 \u2728"
114 changes: 96 additions & 18 deletions src/pip/_vendor/requests/adapters.py
@@ -8,6 +8,7 @@

import os.path
import socket # noqa: F401
import typing

from pip._vendor.urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError
@@ -25,6 +26,7 @@
from pip._vendor.urllib3.util import Timeout as TimeoutSauce
from pip._vendor.urllib3.util import parse_url
from pip._vendor.urllib3.util.retry import Retry
from pip._vendor.urllib3.util.ssl_ import create_urllib3_context

from .auth import _basic_auth_str
from .compat import basestring, urlparse
@@ -61,11 +63,57 @@ def SOCKSProxyManager(*args, **kwargs):
raise InvalidSchema("Missing dependencies for SOCKS support.")


if typing.TYPE_CHECKING:
from .models import PreparedRequest


DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None

_preloaded_ssl_context = create_urllib3_context()
_preloaded_ssl_context.load_verify_locations(
extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
)


def _urllib3_request_context(
request: "PreparedRequest",
verify: "bool | str | None",
client_cert: "typing.Tuple[str, str] | str | None",
) -> "(typing.Dict[str, typing.Any], typing.Dict[str, typing.Any])":
host_params = {}
pool_kwargs = {}
parsed_request_url = urlparse(request.url)
scheme = parsed_request_url.scheme.lower()
port = parsed_request_url.port
cert_reqs = "CERT_REQUIRED"
if verify is False:
cert_reqs = "CERT_NONE"
elif verify is True:
pool_kwargs["ssl_context"] = _preloaded_ssl_context
elif isinstance(verify, str):
if not os.path.isdir(verify):
pool_kwargs["ca_certs"] = verify
else:
pool_kwargs["ca_cert_dir"] = verify
pool_kwargs["cert_reqs"] = cert_reqs
if client_cert is not None:
if isinstance(client_cert, tuple) and len(client_cert) == 2:
pool_kwargs["cert_file"] = client_cert[0]
pool_kwargs["key_file"] = client_cert[1]
else:
# According to our docs, we allow users to specify just the client
# cert path
pool_kwargs["cert_file"] = client_cert
host_params = {
"scheme": scheme,
"host": parsed_request_url.hostname,
"port": port,
}
return host_params, pool_kwargs


class BaseAdapter:
"""The Base Transport Adapter"""
@@ -247,28 +295,26 @@ def cert_verify(self, conn, url, verify, cert):
:param cert: The SSL certificate to verify.
"""
if url.lower().startswith("https") and verify:
conn.cert_reqs = "CERT_REQUIRED"

cert_loc = None

# Allow self-specified cert location.
# Only load the CA certificates if 'verify' is a string indicating the CA bundle to use.
# Otherwise, if verify is a boolean, we don't load anything since
# the connection will be using a context with the default certificates already loaded,
# and this avoids a call to the slow load_verify_locations()
if verify is not True:
# `verify` must be a str with a path then
cert_loc = verify

if not cert_loc:
cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
if not os.path.exists(cert_loc):
raise OSError(
f"Could not find a suitable TLS CA certificate bundle, "
f"invalid path: {cert_loc}"
)

if not cert_loc or not os.path.exists(cert_loc):
raise OSError(
f"Could not find a suitable TLS CA certificate bundle, "
f"invalid path: {cert_loc}"
)

conn.cert_reqs = "CERT_REQUIRED"

if not os.path.isdir(cert_loc):
conn.ca_certs = cert_loc
else:
conn.ca_cert_dir = cert_loc
if not os.path.isdir(cert_loc):
conn.ca_certs = cert_loc
else:
conn.ca_cert_dir = cert_loc
else:
conn.cert_reqs = "CERT_NONE"
conn.ca_certs = None
@@ -328,6 +374,35 @@ def build_response(self, req, resp):

return response

def _get_connection(self, request, verify, proxies=None, cert=None):
# Replace the existing get_connection without breaking things and
# ensure that TLS settings are considered when we interact with
# urllib3 HTTP Pools
proxy = select_proxy(request.url, proxies)
try:
host_params, pool_kwargs = _urllib3_request_context(request, verify, cert)
except ValueError as e:
raise InvalidURL(e, request=request)
if proxy:
proxy = prepend_scheme_if_needed(proxy, "http")
proxy_url = parse_url(proxy)
if not proxy_url.host:
raise InvalidProxyURL(
"Please check proxy URL. It is malformed "
"and could be missing the host."
)
proxy_manager = self.proxy_manager_for(proxy)
conn = proxy_manager.connection_from_host(
**host_params, pool_kwargs=pool_kwargs
)
else:
# Only scheme should be lower case
conn = self.poolmanager.connection_from_host(
**host_params, pool_kwargs=pool_kwargs
)

return conn

def get_connection(self, url, proxies=None):
"""Returns a urllib3 connection for the given URL. This should not be
called from user code, and is only exposed for use when subclassing the
@@ -391,6 +466,9 @@ def request_url(self, request, proxies):
using_socks_proxy = proxy_scheme.startswith("socks")

url = request.path_url
if url.startswith("//"): # Don't confuse urllib3
url = f"/{url.lstrip('/')}"

if is_proxied_http_request and not using_socks_proxy:
url = urldefragauth(request.url)

@@ -451,7 +529,7 @@ def send(
"""

try:
conn = self.get_connection(request.url, proxies)
conn = self._get_connection(request, verify, proxies=proxies, cert=cert)
except LocationValueError as e:
raise InvalidURL(e, request=request)

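The new _urllib3_request_context helper and _get_connection wrapper above move the TLS settings into urllib3 pool keyword arguments, so certificate options travel with the connection that send() obtains. A minimal sketch of how the helper behaves, assuming pip's vendored module paths; the host, port, and CA path are illustrative, and the commented dict contents are expectations rather than output captured from this commit:

    # Illustrative only: exercise the new helper from the vendored adapters module.
    from pip._vendor import requests
    from pip._vendor.requests.adapters import _urllib3_request_context

    prepared = requests.Request("GET", "https://example.com:8443/simple/").prepare()

    # verify=True reuses the preloaded SSL context instead of reloading CA certs per request.
    host_params, pool_kwargs = _urllib3_request_context(prepared, True, None)
    # host_params -> {"scheme": "https", "host": "example.com", "port": 8443}
    # pool_kwargs -> {"ssl_context": _preloaded_ssl_context, "cert_reqs": "CERT_REQUIRED"}

    # A string verify value forwards the CA bundle (or directory) path instead.
    host_params, pool_kwargs = _urllib3_request_context(prepared, "/etc/ssl/my-ca.pem", None)
    # pool_kwargs -> {"ca_certs": "/etc/ssl/my-ca.pem", "cert_reqs": "CERT_REQUIRED"}
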
2 changes: 1 addition & 1 deletion src/pip/_vendor/requests/api.py
@@ -25,7 +25,7 @@ def request(method, url, **kwargs):
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content_type'`` is a string
defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
to add for the file.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
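For reference, the file-tuple forms that docstring describes can be passed as in this hedged sketch; the upload URL, field names, and extra header are made up for illustration:

    import io
    from pip._vendor import requests

    files = {
        # 2-tuple: (filename, fileobj)
        "log": ("build.log", io.BytesIO(b"ok")),
        # 4-tuple: adds an explicit content type and extra per-part headers
        "report": ("report.csv", io.BytesIO(b"a,b\n1,2\n"), "text/csv", {"X-Note": "demo"}),
    }
    response = requests.post("https://example.com/upload", files=files)
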
1 change: 0 additions & 1 deletion src/pip/_vendor/requests/auth.py
@@ -258,7 +258,6 @@ def handle_401(self, r, **kwargs):
s_auth = r.headers.get("www-authenticate", "")

if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2:

self._thread_local.num_401_calls += 1
pat = re.compile(r"digest ", flags=re.IGNORECASE)
self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1))
15 changes: 13 additions & 2 deletions src/pip/_vendor/requests/compat.py
@@ -7,10 +7,21 @@
compatibility until the next major version.
"""

from pip._vendor import charset_normalizer as chardet

import sys

# -------------------
# Character Detection
# -------------------


def _resolve_char_detection():
"""Find supported character detection libraries."""
chardet = None
return chardet


chardet = _resolve_char_detection()

# -------
# Pythons
# -------
16 changes: 8 additions & 8 deletions src/pip/_vendor/requests/cookies.py
@@ -2,7 +2,7 @@
requests.cookies
~~~~~~~~~~~~~~~~
Compatibility code to be able to use `cookielib.CookieJar` with requests.
Compatibility code to be able to use `http.cookiejar.CookieJar` with requests.
requests.utils imports from here, so be careful with imports.
"""
@@ -23,7 +23,7 @@
class MockRequest:
"""Wraps a `requests.Request` to mimic a `urllib2.Request`.
The code in `cookielib.CookieJar` expects this interface in order to correctly
The code in `http.cookiejar.CookieJar` expects this interface in order to correctly
manage cookie policies, i.e., determine whether a cookie can be set, given the
domains of the request and the cookie.
@@ -76,7 +76,7 @@ def get_header(self, name, default=None):
return self._r.headers.get(name, self._new_headers.get(name, default))

def add_header(self, key, val):
"""cookielib has no legitimate use for this method; add it back if you find one."""
"""cookiejar has no legitimate use for this method; add it back if you find one."""
raise NotImplementedError(
"Cookie headers should be added with add_unredirected_header()"
)
@@ -104,11 +104,11 @@ class MockResponse:
"""Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
...what? Basically, expose the parsed HTTP headers from the server response
the way `cookielib` expects to see them.
the way `http.cookiejar` expects to see them.
"""

def __init__(self, headers):
"""Make a MockResponse for `cookielib` to read.
"""Make a MockResponse for `cookiejar` to read.
:param headers: a httplib.HTTPMessage or analogous carrying the headers
"""
@@ -124,7 +124,7 @@ def getheaders(self, name):
def extract_cookies_to_jar(jar, request, response):
"""Extract the cookies from the response into a CookieJar.
:param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
:param jar: http.cookiejar.CookieJar (not necessarily a RequestsCookieJar)
:param request: our own requests.Request object
:param response: urllib3.HTTPResponse object
"""
@@ -174,7 +174,7 @@ class CookieConflictError(RuntimeError):


class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
"""Compatibility class; is a cookielib.CookieJar, but exposes a dict
"""Compatibility class; is a http.cookiejar.CookieJar, but exposes a dict
interface.
This is the CookieJar we create by default for requests and sessions that
@@ -341,7 +341,7 @@ def __setitem__(self, name, value):
self.set(name, value)

def __delitem__(self, name):
"""Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
"""Deletes a cookie given a name. Wraps ``http.cookiejar.CookieJar``'s
``remove_cookie_by_name()``.
"""
remove_cookie_by_name(self, name)
10 changes: 10 additions & 0 deletions src/pip/_vendor/requests/exceptions.py
@@ -41,6 +41,16 @@ def __init__(self, *args, **kwargs):
CompatJSONDecodeError.__init__(self, *args)
InvalidJSONError.__init__(self, *self.args, **kwargs)

def __reduce__(self):
"""
The __reduce__ method called when pickling the object must
be the one from the JSONDecodeError (be it json/simplejson)
as it expects all the arguments for instantiation, not just
one like the IOError, and the MRO would by default call the
__reduce__ method from the IOError due to the inheritance order.
"""
return CompatJSONDecodeError.__reduce__(self)


class HTTPError(RequestException):
"""An HTTP error occurred."""
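A quick illustration of why the __reduce__ method added above matters, assuming the vendored import path; without it, round-tripping the merged exception through pickle would typically fail with a TypeError because OSError's __reduce__ passes only a single argument to a constructor that expects three:

    import pickle
    from pip._vendor.requests.exceptions import JSONDecodeError

    err = JSONDecodeError("Expecting value", "not valid json", 0)
    restored = pickle.loads(pickle.dumps(err))
    assert isinstance(restored, JSONDecodeError)
    assert restored.msg == "Expecting value" and restored.pos == 0
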
6 changes: 1 addition & 5 deletions src/pip/_vendor/requests/help.py
@@ -10,11 +10,7 @@

from . import __version__ as requests_version

try:
from pip._vendor import charset_normalizer
except ImportError:
charset_normalizer = None

charset_normalizer = None
chardet = None

try:
13 changes: 8 additions & 5 deletions src/pip/_vendor/requests/models.py
@@ -170,7 +170,7 @@ def _encode_files(files, data):
)
)

for (k, v) in files:
for k, v in files:
# support for explicit filename
ft = None
fh = None
@@ -268,7 +268,6 @@ def __init__(
hooks=None,
json=None,
):

# Default empty dicts for dict params.
data = [] if data is None else data
files = [] if files is None else files
@@ -277,7 +276,7 @@
hooks = {} if hooks is None else hooks

self.hooks = default_hooks()
for (k, v) in list(hooks.items()):
for k, v in list(hooks.items()):
self.register_hook(event=k, hook=v)

self.method = method
@@ -790,7 +789,12 @@ def next(self):
@property
def apparent_encoding(self):
"""The apparent encoding, provided by the charset_normalizer or chardet libraries."""
return chardet.detect(self.content)["encoding"]
if chardet is not None:
return chardet.detect(self.content)["encoding"]
else:
# If no character detection library is available, we'll fall back
# to a standard Python utf-8 str.
return "utf-8"

def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the
@@ -865,7 +869,6 @@ def iter_lines(
for chunk in self.iter_content(
chunk_size=chunk_size, decode_unicode=decode_unicode
):

if pending is not None:
chunk = pending + chunk

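Since pip's vendored copy resolves neither charset_normalizer nor chardet (see the compat.py change above), the new branch means apparent_encoding always reports the UTF-8 fallback inside pip. A hedged sketch, with an illustrative URL:

    from pip._vendor import requests

    response = requests.get("https://pypi.org/simple/")
    # No detection library is vendored, so the new fallback branch applies.
    print(response.apparent_encoding)  # "utf-8"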