Skip to content

Commit

Permalink
Release 3.12.2 (#206)
Browse files Browse the repository at this point in the history
3.12.2 (2025-01-22)
-------------------

**Fixed**
- Parsing of special schemes that exceed 9 characters on rare custom
adapters.

**Changed**
- Default `Content-Type` for json payloads changed from
`application/json; charset="utf-8"` to `application/json;charset=utf-8`.
While the previous default was valid, this is the preferred value
according to RFC9110. (#204)

**Misc**
- Removed a useless hasattr check used to support older versions of
urllib3-future (<2.5).
- Updated our pre-commit configuration and reformatted files
accordingly.
  • Loading branch information
Ousret authored Jan 22, 2025
2 parents 42176d9 + 16a2a08 commit ee4f225
Show file tree
Hide file tree
Showing 35 changed files with 707 additions and 1,275 deletions.
13 changes: 4 additions & 9 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,28 +2,23 @@ exclude: 'docs/|ext/'

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v5.0.0
hooks:
- id: check-yaml
- id: debug-statements
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.1
hooks:
- id: pyupgrade
args: [--py37-plus]
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.3.2
rev: v0.9.1
hooks:
# Run the linter.
- id: ruff
args: [ --fix ]
args: [ --fix, --target-version=py37 ]
# Run the formatter.
- id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.12.1
rev: v1.14.1
hooks:
- id: mypy
args: [--check-untyped-defs]
Expand Down
14 changes: 14 additions & 0 deletions HISTORY.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,20 @@
Release History
===============

3.12.2 (2025-01-22)
-------------------

**Fixed**
- Parsing of special schemes that exceed 9 characters on rare custom adapters.

**Changed**
- Default `Content-Type` for json payloads changed from `application/json; charset="utf-8"` to `application/json;charset=utf-8`.
While the previous default was valid, this is the preferred value according to RFC9110. (#204)

**Misc**
- Removed a useless hasattr check used to support older versions of urllib3-future (<2.5).
- Updated our pre-commit configuration and reformatted files accordingly.

3.12.1 (2025-01-03)
-------------------

Expand Down
19 changes: 14 additions & 5 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -94,11 +94,20 @@ packages = [
"src/niquests",
]

[tool.isort]
profile = "black"
src_paths = ["src/niquests", "tests"]
honor_noqa = true
add_imports = "from __future__ import annotations"
[tool.ruff]
line-length = 128

[tool.ruff.lint]
select = [
"E", # pycodestyle
"F", # Pyflakes
"W", # pycodestyle
"I", # isort
"U", # pyupgrade
]

[tool.ruff.lint.isort]
required-imports = ["from __future__ import annotations"]

[tool.pytest.ini_options]
addopts = "--doctest-modules"
Expand Down
11 changes: 7 additions & 4 deletions src/niquests/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,14 +48,17 @@
from ._compat import HAS_LEGACY_URLLIB3

if HAS_LEGACY_URLLIB3 is False:
from urllib3 import Retry as RetryConfiguration
from urllib3 import Timeout as TimeoutConfiguration
from urllib3.exceptions import DependencyWarning
from urllib3 import Timeout as TimeoutConfiguration, Retry as RetryConfiguration
else:
from urllib3_future.exceptions import DependencyWarning # type: ignore[assignment]
from urllib3_future import ( # type: ignore[assignment]
Timeout as TimeoutConfiguration,
Retry as RetryConfiguration,
)
from urllib3_future import ( # type: ignore[assignment]
Timeout as TimeoutConfiguration,
)
from urllib3_future.exceptions import DependencyWarning # type: ignore[assignment]

# urllib3's DependencyWarnings should be silenced.
warnings.simplefilter("ignore", DependencyWarning)
Expand Down Expand Up @@ -89,7 +92,7 @@
TooManyRedirects,
URLRequired,
)
from .models import PreparedRequest, Request, Response, AsyncResponse
from .models import AsyncResponse, PreparedRequest, Request, Response
from .sessions import Session
from .status_codes import codes

Expand Down
4 changes: 2 additions & 2 deletions src/niquests/__version__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,9 @@
__url__: str = "https://niquests.readthedocs.io"

__version__: str
__version__ = "3.12.1"
__version__ = "3.12.2"

__build__: int = 0x031201
__build__: int = 0x031202
__author__: str = "Kenneth Reitz"
__author_email__: str = "[email protected]"
__license__: str = "Apache-2.0"
Expand Down
125 changes: 50 additions & 75 deletions src/niquests/_async.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,15 @@
from __future__ import annotations

import os
import typing
import warnings
import sys
import time
from http.cookiejar import CookieJar
import typing
import warnings
from collections import OrderedDict
from datetime import timedelta
from http.cookiejar import CookieJar
from urllib.parse import urljoin, urlparse

from .status_codes import codes

if typing.TYPE_CHECKING:
Expand All @@ -22,17 +23,26 @@
from urllib3.contrib.webextensions._async import load_extension
else: # Defensive: tested in separate/isolated CI
from urllib3_future import ConnectionInfo # type: ignore[assignment]
from urllib3_future.contrib.resolver._async import AsyncBaseResolver # type: ignore[assignment]
from urllib3_future.contrib.webextensions._async import load_extension # type: ignore[assignment]
from urllib3_future.contrib.resolver._async import ( # type: ignore[assignment]
AsyncBaseResolver,
)
from urllib3_future.contrib.webextensions._async import ( # type: ignore[assignment]
load_extension,
)

from ._constant import (
DEFAULT_POOLSIZE,
DEFAULT_RETRIES,
READ_DEFAULT_TIMEOUT,
WRITE_DEFAULT_TIMEOUT,
DEFAULT_RETRIES,
DEFAULT_POOLSIZE,
)
from ._typing import (
AsyncBodyType,
AsyncHookType,
AsyncHttpAuthenticationType,
AsyncResolverType,
BodyType,
CacheLayerAltSvcType,
CookiesType,
HeadersType,
HookType,
Expand All @@ -42,51 +52,46 @@
MultiPartFilesType,
ProxyType,
QueryParameterType,
RetryType,
TimeoutType,
TLSClientCertType,
TLSVerifyType,
AsyncResolverType,
CacheLayerAltSvcType,
RetryType,
AsyncHookType,
AsyncBodyType,
AsyncHttpAuthenticationType,
)
from .adapters import AsyncBaseAdapter, AsyncHTTPAdapter
from .cookies import (
RequestsCookieJar,
cookiejar_from_dict,
extract_cookies_to_jar,
merge_cookies,
)
from .exceptions import (
ChunkedEncodingError,
ContentDecodingError,
TooManyRedirects,
InvalidSchema,
TooManyRedirects,
)
from .hooks import async_dispatch_hook, default_hooks
from .models import (
DEFAULT_REDIRECT_LIMIT,
AsyncResponse,
PreparedRequest,
Request,
Response,
DEFAULT_REDIRECT_LIMIT,
TransferProgress,
AsyncResponse,
)
from .sessions import Session
from .structures import AsyncQuicSharedCache
from .utils import (
_deepcopy_ci,
_swap_context,
create_async_resolver,
default_headers,
is_ocsp_capable,
parse_scheme,
requote_uri,
resolve_proxies,
rewind_body,
requote_uri,
_swap_context,
_deepcopy_ci,
parse_scheme,
is_ocsp_capable,
)
from .cookies import (
RequestsCookieJar,
cookiejar_from_dict,
extract_cookies_to_jar,
merge_cookies,
)
from .structures import AsyncQuicSharedCache
from .adapters import AsyncBaseAdapter, AsyncHTTPAdapter

# Preferred clock, based on which one is more accurate on a given system.
if sys.platform == "win32":
Expand Down Expand Up @@ -163,9 +168,7 @@ def __init__(
self.proxies: ProxyType = {}

#: Event-handling hooks.
self.hooks: AsyncHookType[PreparedRequest | Response | AsyncResponse] = (
default_hooks() # type: ignore[assignment]
)
self.hooks: AsyncHookType[PreparedRequest | Response | AsyncResponse] = default_hooks() # type: ignore[assignment]

#: Dictionary of querystring data to attach to each
#: :class:`Request <Request>`. The dictionary values may be lists for
Expand Down Expand Up @@ -232,18 +235,12 @@ def __init__(
#: session. By default it is a
#: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
#: may be any other ``cookielib.CookieJar`` compatible object.
self.cookies: RequestsCookieJar | CookieJar = cookiejar_from_dict(
{}, thread_free=True
)
self.cookies: RequestsCookieJar | CookieJar = cookiejar_from_dict({}, thread_free=True)

#: A simple dict that allows us to persist which server support QUIC
#: It is simply forwarded to urllib3.future that handle the caching logic.
#: Can be any mutable mapping.
self.quic_cache_layer = (
quic_cache_layer
if quic_cache_layer is not None
else AsyncQuicSharedCache(max_size=12_288)
)
self.quic_cache_layer = quic_cache_layer if quic_cache_layer is not None else AsyncQuicSharedCache(max_size=12_288)

#: Don't try to manipulate this object.
#: It cannot be pickled and accessing this object may cause
Expand Down Expand Up @@ -291,9 +288,7 @@ def __init__(
)

def __enter__(self) -> typing.NoReturn:
raise SyntaxError(
'You probably meant "async with". Did you forget to prepend the "async" keyword?'
)
raise SyntaxError('You probably meant "async with". Did you forget to prepend the "async" keyword?')

async def __aenter__(self) -> AsyncSession:
return self
Expand Down Expand Up @@ -367,10 +362,7 @@ def get_adapter(self, url: str) -> AsyncBaseAdapter: # type: ignore[override]
try:
extension = load_extension(scheme, implementation=implementation)
for prefix, adapter in self.adapters.items():
if (
scheme in extension.supported_schemes()
and extension.scheme_to_http_scheme(scheme) == parse_scheme(prefix)
):
if scheme in extension.supported_schemes() and extension.scheme_to_http_scheme(scheme) == parse_scheme(prefix):
return adapter
except ImportError:
pass
Expand All @@ -383,9 +375,7 @@ def get_adapter(self, url: str) -> AsyncBaseAdapter: # type: ignore[override]
additional_hint = ""

# Nothing matches :-/
raise InvalidSchema(
f"No connection adapters were found for {url!r}{additional_hint}"
)
raise InvalidSchema(f"No connection adapters were found for {url!r}{additional_hint}")

async def send( # type: ignore[override]
self, request: PreparedRequest, **kwargs: typing.Any
Expand Down Expand Up @@ -427,21 +417,16 @@ async def on_post_connection(conn_info: ConnectionInfo) -> None:
nonlocal ptr_request, request, kwargs
ptr_request.conn_info = conn_info

if (
ptr_request.url
and ptr_request.url.startswith("https://")
and kwargs["verify"]
and is_ocsp_capable(conn_info)
):
strict_ocsp_enabled: bool = (
os.environ.get("NIQUESTS_STRICT_OCSP", "0") != "0"
)
if ptr_request.url and ptr_request.url.startswith("https://") and kwargs["verify"] and is_ocsp_capable(conn_info):
strict_ocsp_enabled: bool = os.environ.get("NIQUESTS_STRICT_OCSP", "0") != "0"

try:
from .extensions._async_ocsp import (
verify as ocsp_verify,
InMemoryRevocationStatus,
)
from .extensions._async_ocsp import (
verify as ocsp_verify,
)
except ImportError:
pass
else:
Expand Down Expand Up @@ -608,9 +593,7 @@ async def _redirect_method_ref(x, y):
# Resolve redirects if allowed.
if allow_redirects:
# Redirect resolving generator.
gen = self.resolve_redirects(
r, request, yield_requests_trail=True, **kwargs
)
gen = self.resolve_redirects(r, request, yield_requests_trail=True, **kwargs)
history = []

async for resp_or_req in gen:
Expand Down Expand Up @@ -693,9 +676,7 @@ async def resolve_redirects( # type: ignore[override]
await resp.raw.read(decode_content=False)

if len(resp.history) >= self.max_redirects:
raise TooManyRedirects(
f"Exceeded {self.max_redirects} redirects.", response=resp
)
raise TooManyRedirects(f"Exceeded {self.max_redirects} redirects.", response=resp)

# Release the connection back into the pool.
if isinstance(resp, AsyncResponse):
Expand All @@ -715,9 +696,7 @@ async def resolve_redirects( # type: ignore[override]
parsed = urlparse(url)
if parsed.fragment == "" and previous_fragment:
parsed = parsed._replace(
fragment=previous_fragment
if isinstance(previous_fragment, str)
else previous_fragment.decode("utf-8")
fragment=previous_fragment if isinstance(previous_fragment, str) else previous_fragment.decode("utf-8")
)
elif parsed.fragment:
previous_fragment = parsed.fragment
Expand All @@ -728,9 +707,7 @@ async def resolve_redirects( # type: ignore[override]
# Compliant with RFC3986, we percent encode the url.
if not parsed.netloc:
url = urljoin(resp.url, requote_uri(url)) # type: ignore[type-var]
assert isinstance(
url, str
), f"urljoin produced {type(url)} instead of str"
assert isinstance(url, str), f"urljoin produced {type(url)} instead of str"
else:
url = requote_uri(url)

Expand Down Expand Up @@ -910,9 +887,7 @@ async def request( # type: ignore[override]

proxies = proxies or {}

settings = self.merge_environment_settings(
prep.url, proxies, stream, verify, cert
)
settings = self.merge_environment_settings(prep.url, proxies, stream, verify, cert)

# Send the request.
send_kwargs = {
Expand Down
4 changes: 1 addition & 3 deletions src/niquests/_compat.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,7 @@
urllib3 = None # type: ignore[assignment]


if (urllib3 is None and urllib3_future is None) or (
HAS_LEGACY_URLLIB3 and urllib3_future is None
):
if (urllib3 is None and urllib3_future is None) or (HAS_LEGACY_URLLIB3 and urllib3_future is None):
raise RuntimeError( # Defensive: tested in separate CI
"This is awkward but your environment is missing urllib3-future. "
"Your environment seems broken. "
Expand Down
Loading

0 comments on commit ee4f225

Please sign in to comment.