Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions CHANGES.rst
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
2.15.902 (2026-02-03)
=====================

- Fixed a multiplexing mixing issue under a specific concurrency condition. (#309)
- Backported the GHSA-38jv-5279-wg99 security patch from upstream: the decompression-bomb safeguards of the streaming API were bypassed when HTTP redirects were followed.
- Backported "Started treating Retry-After times greater than 6 hours as 6 hours by default" from upstream (https://github.com/urllib3/urllib3/pull/3743).


2.15.901 (2025-12-22)
=====================

Expand Down
15 changes: 14 additions & 1 deletion dummyserver/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,11 +202,24 @@ def redirect(self, request: httputil.HTTPServerRequest) -> Response: # type: ig
params = request_params(request)
target = params.get("target", "/")
status = params.get("status", b"303 See Other").decode("latin-1")
compressed = params.get("compressed", b"false") == b"true"

if len(status) == 3:
status = f"{status} Redirect"

headers = [("Location", target)]
return Response(status=status, headers=headers)

if compressed:
headers.append(("Content-Encoding", "gzip"))
data = gzip.compress(b"foo")
else:
data = b""

return Response(
status=status,
headers=headers,
body=data,
)

def not_found(self, request: httputil.HTTPServerRequest) -> Response:
    """Fallback handler: reply with a plain-text 404 for unmatched paths."""
    message = "Not found"
    return Response(message, status="404 Not Found")
Expand Down
1 change: 1 addition & 0 deletions src/urllib3/_async/connectionpool.py
Original file line number Diff line number Diff line change
Expand Up @@ -1384,6 +1384,7 @@ async def _make_request(
response = await conn.getresponse(
police_officer=self.pool,
early_response_callback=on_early_response,
promise=rp,
)
except (BaseSSLError, OSError) as e:
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
Expand Down
6 changes: 5 additions & 1 deletion src/urllib3/_async/response.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,11 @@ async def drain_conn(self) -> None: # type: ignore[override]
Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
"""
try:
await self.read()
await self.read(
# Do not spend resources decoding the content unless
# decoding has already been initiated.
decode_content=self._has_decoded_content,
)
except (HTTPError, OSError, BaseSSLError):
pass

Expand Down
2 changes: 1 addition & 1 deletion src/urllib3/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# This file is protected via CODEOWNERS
from __future__ import annotations

__version__ = "2.15.901"
__version__ = "2.15.902"
1 change: 1 addition & 0 deletions src/urllib3/connectionpool.py
Original file line number Diff line number Diff line change
Expand Up @@ -1375,6 +1375,7 @@ def _make_request(
response = conn.getresponse(
police_officer=self.pool,
early_response_callback=on_early_response,
promise=rp,
)
except (BaseSSLError, OSError) as e:
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
Expand Down
6 changes: 5 additions & 1 deletion src/urllib3/response.py
Original file line number Diff line number Diff line change
Expand Up @@ -561,7 +561,11 @@ def drain_conn(self) -> None:
Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
"""
try:
self.read()
self.read(
# Do not spend resources decoding the content unless
# decoding has already been initiated.
decode_content=self._has_decoded_content,
)
except (HTTPError, OSError, BaseSSLError):
pass

Expand Down
16 changes: 16 additions & 0 deletions src/urllib3/util/retry.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,6 +180,11 @@ class Retry:
Sequence of headers to remove from the request when a response
indicating a redirect is returned before firing off the redirected
request.

:param int retry_after_max: Number of seconds to allow as the maximum for
Retry-After headers. Defaults to :attr:`Retry.DEFAULT_RETRY_AFTER_MAX`.
Any Retry-After headers larger than this value will be limited to this
value.
"""

#: Default methods to be used for ``allowed_methods``
Expand All @@ -198,6 +203,10 @@ class Retry:
#: Default maximum backoff time.
DEFAULT_BACKOFF_MAX = 120

# This is undocumented in the RFC. Setting to 6 hours matches other popular libraries.
#: Default maximum allowed value for Retry-After headers in seconds
DEFAULT_RETRY_AFTER_MAX: typing.Final[int] = 21600

# Backward compatibility; assigned outside of the class.
DEFAULT: typing.ClassVar[Retry]

Expand All @@ -221,6 +230,7 @@ def __init__(
str
] = DEFAULT_REMOVE_HEADERS_ON_REDIRECT,
backoff_jitter: float = 0.0,
retry_after_max: int = DEFAULT_RETRY_AFTER_MAX,
) -> None:
self.total = total
self.connect = connect
Expand All @@ -245,6 +255,7 @@ def __init__(
h.lower() for h in remove_headers_on_redirect
)
self.backoff_jitter = backoff_jitter
self.retry_after_max = retry_after_max

def new(self, **kw: typing.Any) -> Retry:
params = dict(
Expand All @@ -264,6 +275,7 @@ def new(self, **kw: typing.Any) -> Retry:
remove_headers_on_redirect=self.remove_headers_on_redirect,
respect_retry_after_header=self.respect_retry_after_header,
backoff_jitter=self.backoff_jitter,
retry_after_max=self.retry_after_max,
)

params.update(kw)
Expand Down Expand Up @@ -322,6 +334,10 @@ def parse_retry_after(self, retry_after: str) -> float:

seconds = max(seconds, 0)

# Check the seconds do not exceed the specified maximum
if seconds > self.retry_after_max:
seconds = self.retry_after_max

return seconds

def get_retry_after(self, response: HTTPResponse) -> float | None:
Expand Down
12 changes: 12 additions & 0 deletions test/test_retry.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,6 +181,18 @@ def test_configurable_backoff_max(self) -> None:
retry = retry.increment(method="GET")
assert retry.get_backoff_time() == max_backoff

def test_configurable_retry_after_max(self) -> None:
    """Retry-After values above the configured maximum are clamped."""
    default_cap = Retry.DEFAULT_RETRY_AFTER_MAX

    # A default instance clamps at DEFAULT_RETRY_AFTER_MAX.
    retry = Retry()
    assert retry.parse_retry_after(str(default_cap)) == default_cap
    assert retry.parse_retry_after(str(default_cap + 1)) == default_cap

    # A custom, smaller maximum is honored the same way.
    retry = Retry(retry_after_max=1)
    assert retry.parse_retry_after("1") == 1
    assert retry.parse_retry_after("2") == 1

def test_backoff_jitter(self) -> None:
"""Backoff with jitter is computed correctly"""
max_backoff = 1
Expand Down
20 changes: 20 additions & 0 deletions test/with_dummyserver/asynchronous/test_connectionpool.py
Original file line number Diff line number Diff line change
Expand Up @@ -576,6 +576,26 @@ async def test_redirect(self) -> None:
assert r.status == 200
assert await r.data == b"Dummy server!"

@mock.patch("urllib3.response.GzipDecoder.decompress")
async def test_no_decoding_with_redirect_when_preload_disabled(
    self, gzip_decompress: mock.MagicMock
) -> None:
    """
    Ensure gzipped redirect responses are never decoded while the
    redirect chain is followed when ``preload_content`` is ``False``.
    """
    fields = {"target": "/redirect?compressed=true", "compressed": "true"}
    async with AsyncHTTPConnectionPool(self.host, self.port) as pool:
        # Expected flow: two compressed redirects, then the final 200 OK.
        response = await pool.request(
            "GET", "/redirect", fields=fields, preload_content=False
        )

        assert response.status == 200
        gzip_decompress.assert_not_called()

async def test_303_redirect_makes_request_lose_body(self) -> None:
async with AsyncHTTPConnectionPool(self.host, self.port) as pool:
response = await pool.request(
Expand Down
21 changes: 21 additions & 0 deletions test/with_dummyserver/test_connectionpool.py
Original file line number Diff line number Diff line change
Expand Up @@ -481,6 +481,27 @@ def test_redirect(self) -> None:
assert r.status == 200
assert r.data == b"Dummy server!"

@mock.patch("urllib3.response.GzipDecoder.decompress")
def test_no_decoding_with_redirect_when_preload_disabled(
    self, gzip_decompress: mock.MagicMock
) -> None:
    """
    Ensure gzipped redirect responses are never decoded while the
    redirect chain is followed when ``preload_content`` is ``False``.
    """
    fields = {"target": "/redirect?compressed=true", "compressed": "true"}
    with HTTPConnectionPool(self.host, self.port) as pool:
        # Expected flow: two compressed redirects, then the final 200 OK.
        response = pool.request(
            "GET", "/redirect", fields=fields, preload_content=False
        )

        assert response.status == 200
        gzip_decompress.assert_not_called()

def test_303_redirect_makes_request_lose_body(self) -> None:
with HTTPConnectionPool(self.host, self.port) as pool:
response = pool.request(
Expand Down
Loading