diff --git a/src/_packagedcode/pypi.py b/src/_packagedcode/pypi.py index e8fe5171..8a309785 100644 --- a/src/_packagedcode/pypi.py +++ b/src/_packagedcode/pypi.py @@ -534,7 +534,7 @@ def parse(cls, location): for metapath in path.iterdir(): if not metapath.name.endswith('METADATA'): continue - + yield parse_metadata( location=metapath, datasource_id=cls.datasource_id, @@ -945,6 +945,8 @@ def get_requirements_txt_dependencies(location, include_nested=False): purl = purl and purl.to_string() or None + hash_options = req.hash_options or [] + req.hash_options = [] requirement = req.dumps() if location.endswith( @@ -973,7 +975,7 @@ def get_requirements_txt_dependencies(location, include_nested=False): extra_data=dict( is_editable=req.is_editable, link=req.link and req.link.url or None, - hash_options=req.hash_options or [], + hash_options=hash_options, is_constraint=req.is_constraint, is_archive=req.is_archive, is_wheel=req.is_wheel, diff --git a/tests/data/azure-devops.req-310-expected.json b/tests/data/azure-devops.req-310-expected.json index a6504156..5611f91c 100644 --- a/tests/data/azure-devops.req-310-expected.json +++ b/tests/data/azure-devops.req-310-expected.json @@ -126,12 +126,12 @@ "type": "pypi", "namespace": null, "name": "azure-core", - "version": "1.35.0", + "version": "1.35.1", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Microsoft Azure Core Library for Python\n# Azure Core shared client library for Python\n\nAzure core provides shared exceptions and modules for Python SDK client libraries.\nThese libraries follow the [Azure SDK Design Guidelines for Python](https://azure.github.io/azure-sdk/python/guidelines/index.html) .\n\nIf you are a client library developer, please reference [client library developer reference](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md) for more information.\n\n[Source code](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/)\n| [Package (Pypi)][package]\n| [Package (Conda)](https://anaconda.org/microsoft/azure-core/)\n| [API reference documentation](https://learn.microsoft.com/python/api/overview/azure/core-readme)\n\n## Getting started\n\nTypically, you will not need to install azure core;\nit will be installed when you install one of the client libraries using it.\nIn case you want to install it explicitly (to implement your own client library, for example),\nyou can find it [here](https://pypi.org/project/azure-core/).\n\n## Key concepts\n\n### Azure Core Library Exceptions\n\n#### AzureError\n\nAzureError is the base exception for all errors.\n\n```python\nclass AzureError(Exception):\n def __init__(self, message, *args, **kwargs):\n self.inner_exception = kwargs.get(\"error\")\n self.exc_type, self.exc_value, self.exc_traceback = sys.exc_info()\n self.exc_type = self.exc_type.__name__ if self.exc_type else type(self.inner_exception)\n self.exc_msg = \"{}, {}: {}\".format(message, self.exc_type, self.exc_value) # type: ignore\n self.message = str(message)\n self.continuation_token = kwargs.get(\"continuation_token\")\n super(AzureError, self).__init__(self.message, *args)\n```\n\n*message* is any message (str) to be associated with the exception.\n\n*args* are any additional args to be included with exception.\n\n*kwargs* are keyword arguments to include with the exception. 
Use the keyword *error* to pass in an internal exception and *continuation_token* for a token reference to continue an incomplete operation.\n\n**The following exceptions inherit from AzureError:**\n\n#### ServiceRequestError\n\nAn error occurred while attempt to make a request to the service. No request was sent.\n\n#### ServiceResponseError\n\nThe request was sent, but the client failed to understand the response.\nThe connection may have timed out. These errors can be retried for idempotent or safe operations.\n\n#### HttpResponseError\n\nA request was made, and a non-success status code was received from the service.\n\n```python\nclass HttpResponseError(AzureError):\n def __init__(self, message=None, response=None, **kwargs):\n self.reason = None\n self.response = response\n if response:\n self.reason = response.reason\n self.status_code = response.status_code\n self.error = self._parse_odata_body(ODataV4Format, response) # type: Optional[ODataV4Format]\n if self.error:\n message = str(self.error)\n else:\n message = message or \"Operation returned an invalid status '{}'\".format(\n self.reason\n )\n\n super(HttpResponseError, self).__init__(message=message, **kwargs)\n```\n\n*message* is the HTTP response error message (optional)\n\n*response* is the HTTP response (optional).\n\n*kwargs* are keyword arguments to include with the exception.\n\n**The following exceptions inherit from HttpResponseError:**\n\n#### DecodeError\n\nAn error raised during response de-serialization.\n\n#### IncompleteReadError\n\nAn error raised if peer closes the connection before we have received the complete message body.\n\n#### ResourceExistsError\n\nAn error response with status code 4xx. This will not be raised directly by the Azure core pipeline.\n\n#### ResourceNotFoundError\n\nAn error response, typically triggered by a 412 response (for update) or 404 (for get/post).\n\n#### ResourceModifiedError\n\nAn error response with status code 4xx, typically 412 Conflict. This will not be raised directly by the Azure core pipeline.\n\n#### ResourceNotModifiedError\n\nAn error response with status code 304. This will not be raised directly by the Azure core pipeline.\n\n#### ClientAuthenticationError\n\nAn error response with status code 4xx. This will not be raised directly by the Azure core pipeline.\n\n#### TooManyRedirectsError\n\nAn error raised when the maximum number of redirect attempts is reached. 
The maximum amount of redirects can be configured in the RedirectPolicy.\n\n```python\nclass TooManyRedirectsError(HttpResponseError):\n def __init__(self, history, *args, **kwargs):\n self.history = history\n message = \"Reached maximum redirect attempts.\"\n super(TooManyRedirectsError, self).__init__(message, *args, **kwargs)\n```\n\n*history* is used to document the requests/responses that resulted in redirected requests.\n\n*args* are any additional args to be included with exception.\n\n*kwargs* are keyword arguments to include with the exception.\n\n#### StreamConsumedError\n\nAn error thrown if you try to access the stream of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once\nthe response stream has been consumed.\n\n#### StreamClosedError\n\nAn error thrown if you try to access the stream of the `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once\nthe response stream has been closed.\n\n#### ResponseNotReadError\n\nAn error thrown if you try to access the `content` of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` before\nreading in the response's bytes first.\n\n### Configurations\n\nWhen calling the methods, some properties can be configured by passing in as kwargs arguments.\n\n| Parameters | Description |\n| --- | --- |\n| headers | The HTTP Request headers. |\n| request_id | The request id to be added into header. |\n| user_agent | If specified, this will be added in front of the user agent string. |\n| logging_enable| Use to enable per operation. Defaults to `False`. |\n| logger | If specified, it will be used to log information. |\n| response_encoding | The encoding to use if known for this service (will disable auto-detection). |\n| raw_request_hook | Callback function. Will be invoked on request. |\n| raw_response_hook | Callback function. Will be invoked on response. |\n| network_span_namer | A callable to customize the span name. |\n| tracing_attributes | Attributes to set on all created spans. |\n| permit_redirects | Whether the client allows redirects. Defaults to `True`. |\n| redirect_max | The maximum allowed redirects. Defaults to `30`. |\n| retry_total | Total number of retries to allow. Takes precedence over other counts. Default value is `10`. |\n| retry_connect | How many connection-related errors to retry on. These are errors raised before the request is sent to the remote server, which we assume has not triggered the server to process the request. Default value is `3`. |\n| retry_read | How many times to retry on read errors. These errors are raised after the request was sent to the server, so the request may have side-effects. Default value is `3`. |\n| retry_status | How many times to retry on bad status codes. Default value is `3`. |\n| retry_backoff_factor | A backoff factor to apply between attempts after the second try (most errors are resolved immediately by a second try without a delay). Retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))` seconds. If the backoff_factor is 0.1, then the retry will sleep for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is `0.8`. |\n| retry_backoff_max | The maximum back off time. Default value is `120` seconds (2 minutes). |\n| retry_mode | Fixed or exponential delay between attempts, default is `Exponential`. |\n| timeout | Timeout setting for the operation in seconds, default is `604800`s (7 days). |\n| connection_timeout | A single float in seconds for the connection timeout. 
Defaults to `300` seconds. |\n| read_timeout | A single float in seconds for the read timeout. Defaults to `300` seconds. |\n| connection_verify | SSL certificate verification. Enabled by default. Set to False to disable, alternatively can be set to the path to a CA_BUNDLE file or directory with certificates of trusted CAs. |\n| connection_cert | Client-side certificates. You can specify a local cert to use as client side certificate, as a single file (containing the private key and the certificate) or as a tuple of both files' paths. |\n| proxies | Dictionary mapping protocol or protocol and hostname to the URL of the proxy. |\n| cookies | Dict or CookieJar object to send with the `Request`. |\n| connection_data_block_size | The block size of data sent over the connection. Defaults to `4096` bytes. |\n\n### Async transport\n\nThe async transport is designed to be opt-in. [AioHttp](https://pypi.org/project/aiohttp/) is one of the supported implementations of async transport. It is not installed by default. You need to install it separately.\n\n### Shared modules\n\n#### MatchConditions\n\nMatchConditions is an enum to describe match conditions.\n\n```python\nclass MatchConditions(Enum):\n Unconditionally = 1 # Matches any condition\n IfNotModified = 2 # If the target object is not modified. Usually it maps to etag=\n IfModified = 3 # Only if the target object is modified. Usually it maps to etag!=\n IfPresent = 4 # If the target object exists. Usually it maps to etag='*'\n IfMissing = 5 # If the target object does not exist. Usually it maps to etag!='*'\n```\n\n#### CaseInsensitiveEnumMeta\n\nA metaclass to support case-insensitive enums.\n\n```python\nfrom enum import Enum\n\nfrom azure.core import CaseInsensitiveEnumMeta\n\nclass MyCustomEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):\n FOO = 'foo'\n BAR = 'bar'\n```\n\n#### Null Sentinel Value\n\nA falsy sentinel object which is supposed to be used to specify attributes\nwith no data. This gets serialized to `null` on the wire.\n\n```python\nfrom azure.core.serialization import NULL\n\nassert bool(NULL) is False\n\nfoo = Foo(\n attr=NULL\n)\n```\n\n## Contributing\n\nThis project welcomes contributions and suggestions. Most contributions require\nyou to agree to a Contributor License Agreement (CLA) declaring that you have\nthe right to, and actually do, grant us the rights to use your contribution.\nFor details, visit [https://cla.microsoft.com](https://cla.microsoft.com).\n\nWhen you submit a pull request, a CLA-bot will automatically determine whether\nyou need to provide a CLA and decorate the PR appropriately (e.g., label,\ncomment). Simply follow the instructions provided by the bot. You will only\nneed to do this once across all repos using our CLA.\n\nThis project has adopted the\n[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).\nFor more information, see the\n[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)\nor contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any\nadditional questions or comments.\n\n\n[package]: https://pypi.org/project/azure-core/\n\n\n# Release History\n\n## 1.35.0 (2025-07-02)\n\n### Features Added\n\n- Added a `start_time` keyword argument to the `start_span` and `start_as_current_span` methods in the `OpenTelemetryTracer` class. This allows users to specify a custom start time for created spans. 
#41106\n- Added a `context` keyword argument to the `start_span` and `start_as_current_span` methods in the `OpenTelemetryTracer` class. This allows users to specify a custom parent context for created spans. #41511\n- Added method `as_attribute_dict` to `azure.core.serialization` for backcompat migration purposes. Will return a generated model as a dictionary where the keys are in attribute syntax.\n- Added `is_generated_model` method to `azure.core.serialization`. Returns whether a given input is a model from one of our generated sdks. #41445\n- Added `attribute_list` method to `azure.core.serialization`. Returns all of the attributes of a given model from one of our generated sdks. #41571\n\n### Other Changes\n\n- A timeout error when using the `aiohttp` transport (the default for async SDKs) will now be raised as a `azure.core.exceptions.ServiceResponseTimeoutError`, a subtype of the previously raised `ServiceResponseError`.\n- When using with `aiohttp` 3.10 or later, a connection timeout error will now be raised as a `azure.core.exceptions.ServiceRequestTimeoutError`, which can be retried.\n- The default implementation of `on_challenge` in `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` will now cache the retrieved token. #41857\n\n## 1.34.0 (2025-05-01)\n\n### Features Added\n\n- Added a `set_span_error_status` method to the `OpenTelemetryTracer` class. This method allows users to set the status of a span to `ERROR` after it has been created. #40703\n\n### Other Changes\n\n- Python 3.8 is no longer supported. Please use Python version 3.9 or later.\n\n## 1.33.0 (2025-04-03)\n\n### Features Added\n\n- Added native OpenTelemetry tracing to Azure Core which enables users to use OpenTelemetry to trace Azure SDK operations without needing to install a plugin. #39563\n - To enable native OpenTelemetry tracing, users need to:\n 1. Have `opentelemetry-api` installed.\n 2. Ensure that `settings.tracing_implementation` is not set.\n 3. Ensure that `settings.tracing_enabled` is set to `True`.\n - If `setting.tracing_implementation` is set, the tracing plugin will be used instead of the native tracing.\n - If `settings.tracing_enabled` is set to `False`, tracing will be disabled.\n - The `OpenTelemetryTracer` class was added to the `azure.core.tracing.opentelemetry` module. This is a wrapper around the OpenTelemetry tracer that is used to create spans for Azure SDK operations.\n - Added a `get_tracer` method to the new `azure.core.instrumentation` module. This method returns an instance of the `OpenTelemetryTracer` class if OpenTelemetry is available.\n - A `TracingOptions` TypedDict class was added to define the options that SDK users can use to configure tracing per-operation. These options include the ability to enable or disable tracing and set additional attributes on spans.\n - Example usage: `client.method(tracing_options={\"enabled\": True, \"attributes\": {\"foo\": \"bar\"}})`\n - The `DistributedTracingPolicy` and `distributed_trace`/`distributed_trace_async` decorators now uses the OpenTelemetry tracer if it is available and native tracing is enabled.\n - SDK clients can define an `_instrumentation_config` class variable to configure the OpenTelemetry tracer used in method span creation. 
Possible configuration options are `library_name`, `library_version`, `schema_url`, and `attributes`.\n - `DistributedTracingPolicy` now accepts a `instrumentation_config` keyword argument to configure the OpenTelemetry tracer used in HTTP span creation.\n\n### Breaking Changes\n\n- Removed automatic tracing enablement for the OpenTelemetry plugin if `opentelemetry` was imported. To enable tracing with the plugin, please import `azure.core.settings.settings` and set `settings.tracing_implementation` to `\"opentelemetry\"`. #39563\n- In `DistributedTracingPolicy`, the default span name is now just the HTTP method (e.g., \"GET\", \"POST\") and no longer includes the URL path. This change was made to converge with the OpenTelemetry HTTP semantic conventions. The full URL is still included in the span attributes.\n- Renamed span attributes in `DistributedTracingPolicy`:\n - \"x-ms-client-request-id\" is now \"az.client_request_id\"\n - \"x-ms-request-id\" is now \"az.service_request_id\"\n\n### Bugs Fixed\n\n- Fixed an issue where the `traceparent` header was not being set correctly in the `DistributedTracingPolicy`. The `traceparent` header will now set based on the context of the HTTP client span. #40074\n\n### Other Changes\n\n- Added `opentelemetry-api` as an optional dependency for tracing. This can be installed with `pip install azure-core[tracing]`. #39563\n\n## 1.32.0 (2024-10-31)\n\n### Features Added\n\n- Added a default implementation to handle token challenges in `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy`.\n\n### Bugs Fixed\n\n- Fixed an issue where the `tracing_attributes` keyword argument wasn't being handled at the request/method level. #38164\n\n### Other Changes\n\n- Log \"x-vss-e2eid\" and \"x-msedge-ref\" headers in `HttpLoggingPolicy`.\n\n## 1.31.0 (2024-09-12)\n\n### Features Added\n\n- Added azure.core.AzureClouds enum to represent the different Azure clouds.\n- Added two new credential protocol classes, `SupportsTokenInfo` and `AsyncSupportsTokenInfo`, to offer more extensibility in supporting various token acquisition scenarios. #36565\n - Each new protocol class defines a `get_token_info` method that returns an `AccessTokenInfo` object.\n- Added a new `TokenRequestOptions` class, which is a `TypedDict` with optional parameters, that can be used to define options for token requests through the `get_token_info` method. #36565\n- Added a new `AccessTokenInfo` class, which is returned by `get_token_info` implementations. This class contains the token, its expiration time, and optional additional information like when a token should be refreshed. #36565\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` now first check if a credential has the `get_token_info` method defined. If so, the `get_token_info` method is used to acquire a token. Otherwise, the `get_token` method is used. #36565\n - These policies now also check the `refresh_on` attribute when determining if a new token request should be made.\n\n### Other Changes\n\n- The Azure Core OpenTelemetry tracing plugin will now be the preferred tracing plugin over the OpenCensus plugin. If both plugins are installed and `opentelemetry` is imported, then OpenTelemetry will be used to trace Azure SDK operations. #35050\n\n## 1.30.2 (2024-06-06)\n\n### Features Added\n\n- Tracing: `DistributedTracingPolicy` will now set an attribute, `http.request.resend_count`, on HTTP spans for resent requests to indicate the resend attempt number. 
#35069\n\n### Bugs Fixed\n\n- Raise correct exception if transport is used while already closed #35559\n\n### Other Changes\n\n- HTTP tracing spans will now include an `error.type` attribute if an error status code is returned. #34619\n- Minimum required Python version is now 3.8\n\n## 1.30.1 (2024-02-29)\n\n### Other Changes\n\n- Accept float for `retry_after` header. #34203\n\n## 1.30.0 (2024-02-01)\n\n### Features Added\n\n- Support tuple input for file values to `azure.core.rest.HttpRequest` #33948\n- Support tuple input to `files` with duplicate field names `azure.core.rest.HttpRequest` #34021\n\n## 1.29.7 (2024-01-18)\n\n### Other Changes\n\n- Removed dependency on `anyio`. #33282\n\n## 1.29.6 (2023-12-14)\n\n### Bugs Fixed\n\n- Adjusted `AsyncBearerTokenCredentialPolicy` to work properly with `trio` concurrency mechanisms. ([#33307](https://github.com/Azure/azure-sdk-for-python/pull/33307))\n\n### Other Changes\n\n- Added dependency on `anyio` >=3.0,<5.0\n- Bumped minimum dependency on `requests` to 2.21.0.\n\n## 1.29.5 (2023-10-19)\n\n### Bugs Fixed\n\n- Fixed an issue with `multipart/form-data` in the async transport where `data` was not getting encoded into the request body. #32473\n\n### Other Changes\n\n- Use ssl context from aiohttp by default.\n\n## 1.29.4 (2023-09-07)\n\n### Bugs Fixed\n\n- Fixed the issue that some urls trigger an infinite loop. #31346\n- Fixed issue where IndexError was raised if multipart responses did not match the number of requests. #31471\n- Fixed issue unbound variable exception if dict is invalid in CloudEvent.from_dict. #31835\n- Fixed issue asyncBearerTokenCredentialPolicy is not backward compatible with SansIOHTTPPolicy. #31836\n- Fixed issue mypy complains with new version of azure-core. #31564\n\n## 1.29.3 (2023-08-22)\n\n### Bugs Fixed\n\n- Typing fix: `message` cannot be `None` in `AzureError`. #31564\n\n## 1.29.2 (2023-08-14)\n\n### Bugs Fixed\n\n- Added a default implementation for `AsyncTokenCredential.__aexit__()` #31573\n\n### Other Changes\n\n- Bumped `typing-extensions` version to 4.6.0.\n\n## 1.29.1 (2023-08-09)\n\n### Bugs Fixed\n\n- Not pass `enabled_cae` unless it is explicitly enabled.\n\n## 1.29.0 (2023-08-03)\n\n### Features Added\n\n- A keyword argument `enable_cae` was added to the `get_token` method of the `TokenCredential` protocol. #31012\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` now accept `enable_cae` keyword arguments in their constructors. This is used in determining if [Continuous Access Evaluation (CAE)](https://learn.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation) should be enabled for each `get_token` request. #31012\n\n## 1.28.0 (2023-07-06)\n\n### Features Added\n\n- Added header name parameter to `RequestIdPolicy`. #30772\n- Added `SensitiveHeaderCleanupPolicy` that cleans up sensitive headers if a redirect happens and the new destination is in another domain. #28349\n\n### Other Changes\n\n- Catch aiohttp errors and translate them into azure-core errors.\n\n## 1.27.1 (2023-06-13)\n\n### Bugs Fixed\n\n- Fix url building for some complex query parameters scenarios #30707\n\n## 1.27.0 (2023-06-01)\n\n### Features Added\n\n- Added support to use sync credentials in `AsyncBearerTokenCredentialPolicy`. 
#30381\n- Added \"prefix\" parameter to AzureKeyCredentialPolicy #29901\n\n### Bugs Fixed\n\n- Improve error message when providing the wrong credential type for AzureKeyCredential #30380\n\n## 1.26.4 (2023-04-06)\n\n### Features Added\n\n- Updated settings to include OpenTelemetry as a tracer provider. #29095\n\n### Other Changes\n\n- Improved typing\n\n## 1.26.3 (2023-02-02)\n\n### Bugs Fixed\n\n- Fixed deflate decompression for aiohttp #28483\n\n## 1.26.2 (2023-01-05)\n\n### Bugs Fixed\n\n- Fix 'ClientSession' object has no attribute 'auto_decompress' (thanks to @mghextreme for the contribution)\n\n### Other Changes\n\n- Add \"x-ms-error-code\" as secure header to log\n- Rename \"DEFAULT_HEADERS_WHITELIST\" to \"DEFAULT_HEADERS_ALLOWLIST\". Added a backward compatible alias.\n\n## 1.26.1 (2022-11-03)\n\n### Other Changes\n\n- Added example of RequestsTransport with custom session. (thanks to @inirudebwoy for the contribution) #26768\n- Added Python 3.11 support.\n\n## 1.26.0 (2022-10-06)\n\n### Other Changes\n\n- LRO polling will not wait anymore before doing the first status check #26376\n- Added extra dependency for [aio]. pip install azure-core[aio] installs aiohttp too.\n\n## 1.25.1 (2022-09-01)\n\n### Bugs Fixed\n\n- Added @runtime_checkable to `TokenCredential` protocol definitions #25187\n\n## 1.25.0 (2022-08-04)\n\nAzure-core is supported on Python 3.7 or later. For more details, please read our page on [Azure SDK for Python version support policy](https://github.com/Azure/azure-sdk-for-python/wiki/Azure-SDKs-Python-version-support-policy).\n\n### Features Added\n\n- Added `CaseInsensitiveDict` implementation in `azure.core.utils` removing dependency on `requests` and `aiohttp`\n\n## 1.24.2 (2022-06-30)\n\n### Bugs Fixed\n\n- Fixed the bug that azure-core could not be imported under Python 3.11.0b3 #24928\n- `ContentDecodePolicy` can now correctly deserialize more JSON bodies with different mime types #22410\n\n## 1.24.1 (2022-06-01)\n\n### Bugs Fixed\n\n- Declare method level span as INTERNAL by default #24492\n- Fixed type hints for `azure.core.paging.ItemPaged` #24548\n\n## 1.24.0 (2022-05-06)\n\n### Features Added\n\n- Add `SerializationError` and `DeserializationError` in `azure.core.exceptions` for errors raised during serialization / deserialization #24312\n\n## 1.23.1 (2022-03-31)\n\n### Bugs Fixed\n\n- Allow stream inputs to the `content` kwarg of `azure.core.rest.HttpRequest` from objects with a `read` method #23578\n\n## 1.23.0 (2022-03-03)\n\n### Features Added\n\n- Improve intellisense type hinting for service client methods. #22891\n\n- Add a case insensitive dict `case_insensitive_dict` in `azure.core.utils`. #23206\n\n### Bugs Fixed\n\n- Use \"\\n\" rather than \"/n\" for new line in log. #23261\n\n### Other Changes\n\n- Log \"WWW-Authenticate\" header in `HttpLoggingPolicy` #22990\n- Added dependency on `typing-extensions` >= 4.0.1\n\n## 1.22.1 (2022-02-09)\n\n### Bugs Fixed\n\n- Limiting `final-state-via` scope to POST until consuming SDKs has been fixed to use this option properly on PUT. #22989\n\n## 1.22.0 (2022-02-03)\n_[**This version is deprecated.**]_\n\n### Features Added\n\n- Add support for `final-state-via` LRO option in core. #22713\n\n### Bugs Fixed\n\n- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #22302\n- Raise `AttributeError` when calling azure.core.pipeline.transport.\\_\\_bases__ #22469\n\n### Other Changes\n\n- Python 2.7 is no longer supported. 
Please use Python version 3.6 or later.\n\n## 1.21.1 (2021-12-06)\n\n### Other Changes\n\n- Revert change in str method #22023\n\n## 1.21.0 (2021-12-02)\n\n### Breaking Changes\n\n- Sync stream downloading now raises `azure.core.exceptions.DecodeError` rather than `requests.exceptions.ContentDecodingError`\n\n### Bugs Fixed\n\n- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #21800\n\n## 1.20.1 (2021-11-08)\n\n### Bugs Fixed\n\n- Correctly set response's content to decompressed body when users are using aiohttp transport with decompression headers #21620\n\n## 1.20.0 (2021-11-04)\n\n### Features Added\n\n- GA `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in\nrequests and sends them through our pipelines.\n- GA `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.\n- GA errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. These errors\nare thrown if you mishandle streamed responses from the `azure.core.rest` module\n- add kwargs to the methods for `iter_raw` and `iter_bytes` #21529\n- no longer raise JSON errors if users pass in file descriptors of JSON to the `json` kwarg in `HttpRequest` #21504\n- Added new error type `IncompleteReadError` which is raised if peer closes the connection before we have received the complete message body.\n\n### Breaking Changes\n\n- SansIOHTTPPolicy.on_exception returns None instead of bool.\n\n### Bugs Fixed\n\n- The `Content-Length` header in a http response is strictly checked against the actual number of bytes in the body,\n rather than silently truncating data in case the underlying tcp connection is closed prematurely.\n (thanks to @jochen-ott-by for the contribution) #20412\n- UnboundLocalError when SansIOHTTPPolicy handles an exception #15222\n- Add default content type header of `text/plain` and content length header for users who pass unicode strings to the `content` kwarg of `HttpRequest` in 2.7 #21550\n\n## 1.19.1 (2021-11-01)\n\n### Bugs Fixed\n\n- respect text encoding specified in argument (thanks to @ryohji for the contribution) #20796\n- Fix \"coroutine x.read() was never awaited\" warning from `ContentDecodePolicy` #21318\n- fix type check for `data` input to `azure.core.rest` for python 2.7 users #21341\n- use `charset_normalizer` if `chardet` is not installed to migrate aiohttp 3.8.0 changes.\n\n### Other Changes\n\n- Refactor AzureJSONEncoder (thanks to @Codejune for the contribution) #21028\n\n## 1.19.0 (2021-09-30)\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` are now abstract base classes. They should not be initialized directly, instead\nyour transport responses should inherit from them and implement them.\n- The properties of the `azure.core.rest` responses are now all read-only\n\n- HttpLoggingPolicy integrates logs into one record #19925\n\n## 1.18.0 (2021-09-02)\n\n### Features Added\n\n- `azure.core.serialization.AzureJSONEncoder` (introduced in 1.17.0) serializes `datetime.datetime` objects in ISO 8601 format, conforming to RFC 3339's specification. 
#20190\n- We now use `azure.core.serialization.AzureJSONEncoder` to serialize `json` input to `azure.core.rest.HttpRequest`.\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- The `text` property on `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` has changed to a method, which also takes\nan `encoding` parameter.\n- Removed `iter_text` and `iter_lines` from `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse`\n\n### Bugs Fixed\n\n- The behaviour of the headers returned in `azure.core.rest` responses now aligns across sync and async. Items can now be checked case-insensitively and without raising an error for format.\n\n## 1.17.0 (2021-08-05)\n\n### Features Added\n\n- Cut hard dependency on requests library\n- Added a `from_json` method which now accepts storage QueueMessage, eventhub's EventData or ServiceBusMessage or simply json bytes to return a `CloudEvent`\n\n### Fixed\n\n- Not override \"x-ms-client-request-id\" if it already exists in the header. #17757\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- `azure.core.rest` will not try to guess the `charset` anymore if it was impossible to extract it from `HttpResponse` analysis. This removes our dependency on `charset`.\n\n## 1.16.0 (2021-07-01)\n\n### Features Added\n\n- Add new ***provisional*** methods `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in\nrequests and sends them through our pipelines.\n- Add new ***provisional*** module `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.\n- Add new ***provisional*** errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. These errors\nare thrown if you mishandle streamed responses from the provisional `azure.core.rest` module\n\n### Fixed\n\n- Improved error message in the `from_dict` method of `CloudEvent` when a wrong schema is sent.\n\n## 1.15.0 (2021-06-04)\n\n### New Features\n\n- Added `BearerTokenCredentialPolicy.on_challenge` and `.authorize_request` to allow subclasses to optionally handle authentication challenges\n\n### Bug Fixes\n\n- Retry policies don't sleep after operations time out\n- The `from_dict` methhod in the `CloudEvent` can now convert a datetime string to datetime object when microsecond exceeds the python limitation\n\n## 1.14.0 (2021-05-13)\n\n### New Features\n\n- Added `azure.core.credentials.AzureNamedKeyCredential` credential #17548.\n- Added `decompress` parameter for `stream_download` method. If it is set to `False`, will not do decompression upon the stream. #17920\n\n## 1.13.0 (2021-04-02)\n\nAzure core requires Python 2.7 or Python 3.6+ since this release.\n\n### New Features\n\n- Added `azure.core.utils.parse_connection_string` function to parse connection strings across SDKs, with common validation and support for case insensitive keys.\n- Supported adding custom policies #16519\n- Added `~azure.core.tracing.Link` that should be used while passing `Links` to `AbstractSpan`.\n- `AbstractSpan` constructor can now take in additional keyword only args.\n\n### Bug fixes\n\n- Make NetworkTraceLoggingPolicy show the auth token in plain text. 
#14191\n- Fixed RetryPolicy overriding default connection timeout with an extreme value #17481\n\n## 1.12.0 (2021-03-08)\n\nThis version will be the last version to officially support Python 3.5, future versions will require Python 2.7 or Python 3.6+.\n\n### Features\n\n- Added `azure.core.messaging.CloudEvent` model that follows the cloud event spec.\n- Added `azure.core.serialization.NULL` sentinel value\n- Improve `repr`s for `HttpRequest` and `HttpResponse`s #16972\n\n### Bug Fixes\n\n- Disable retry in stream downloading. (thanks to @jochen-ott-by @hoffmann for the contribution) #16723\n\n## 1.11.0 (2021-02-08)\n\n### Features\n\n- Added `CaseInsensitiveEnumMeta` class for case-insensitive enums. #16316\n- Add `raise_for_status` method onto `HttpResponse`. Calling `response.raise_for_status()` on a response with an error code\nwill raise an `HttpResponseError`. Calling it on a good response will do nothing #16399\n\n### Bug Fixes\n\n- Update conn.conn_kw rather than overriding it when setting block size. (thanks for @jiasli for the contribution) #16587\n\n## 1.10.0 (2021-01-11)\n\n### Features\n\n- Added `AzureSasCredential` and its respective policy. #15946\n\n## 1.9.0 (2020-11-09)\n\n### Features\n\n- Add a `continuation_token` attribute to the base `AzureError` exception, and set this value for errors raised\n during paged or long-running operations.\n\n### Bug Fixes\n\n- Set retry_interval to 1 second instead of 1000 seconds (thanks **vbarbaresi** for contributing) #14357\n\n\n## 1.8.2 (2020-10-05)\n\n### Bug Fixes\n\n- Fixed bug to allow polling in the case of parameterized endpoints with relative polling urls #14097\n\n\n## 1.8.1 (2020-09-08)\n\n### Bug fixes\n\n- SAS credential replicated \"/\" fix #13159\n\n## 1.8.0 (2020-08-10)\n\n### Features\n\n- Support params as list for exploding parameters #12410\n\n\n## 1.7.0 (2020-07-06)\n\n### Bug fixes\n\n- `AzureKeyCredentialPolicy` will now accept (and ignore) passed in kwargs #11963\n- Better error messages if passed endpoint is incorrect #12106\n- Do not JSON encore a string if content type is \"text\" #12137\n\n### Features\n\n- Added `http_logging_policy` property on the `Configuration` object, allowing users to individually\nset the http logging policy of the config #12218\n\n## 1.6.0 (2020-06-03)\n\n### Bug fixes\n\n- Fixed deadlocks in AsyncBearerTokenCredentialPolicy #11543\n- Fix AttributeException in StreamDownloadGenerator #11462\n\n### Features\n\n- Added support for changesets as part of multipart message support #10485\n- Add AsyncLROPoller in azure.core.polling #10801\n- Add get_continuation_token/from_continuation_token/polling_method methods in pollers (sync and async) #10801\n- HttpResponse and PipelineContext objects are now pickable #10801\n\n## 1.5.0 (2020-05-04)\n\n### Features\n\n- Support \"x-ms-retry-after-ms\" in response header #10743\n- `link` and `link_from_headers` now accepts attributes #10765\n\n### Bug fixes\n\n- Not retry if the status code is less than 400 #10778\n- \"x-ms-request-id\" is not considered safe header for logging #10967\n\n## 1.4.0 (2020-04-06)\n\n### Features\n\n- Support a default error type in map_error #9773\n- Added `AzureKeyCredential` and its respective policy. 
#10509\n- Added `azure.core.polling.base_polling` module with a \"Microsoft One API\" polling implementation #10090\n Also contains the async version in `azure.core.polling.async_base_polling`\n- Support kwarg `enforce_https` to disable HTTPS check on authentication #9821\n- Support additional kwargs in `HttpRequest.set_multipart_mixed` that will be passed into pipeline context.\n\n## 1.3.0 (2020-03-09)\n\n### Bug fixes\n\n- Appended RequestIdPolicy to the default pipeline #9841\n- Rewind the body position in async_retry #10117\n\n### Features\n\n- Add raw_request_hook support in custom_hook_policy #9958\n- Add timeout support in retry_policy #10011\n- Add OdataV4 error format auto-parsing in all exceptions ('error' attribute) #9738\n\n## 1.2.2 (2020-02-10)\n\n### Bug fixes\n\n- Fixed a bug that sends None as request_id #9545\n- Enable mypy for customers #9572\n- Handle TypeError in deep copy #9620\n- Fix text/plain content-type in decoder #9589\n\n## 1.2.1 (2020-01-14)\n\n### Bug fixes\n\n- Fixed a regression in 1.2.0 that was incompatible with azure-keyvault-* 4.0.0\n[#9462](https://github.com/Azure/azure-sdk-for-python/issues/9462)\n\n\n## 1.2.0 (2020-01-14)\n\n### Features\n\n- Add user_agent & sdk_moniker kwargs in UserAgentPolicy init #9355\n- Support OPTIONS HTTP verb #9322\n- Add tracing_attributes to tracing decorator #9297\n- Support auto_request_id in RequestIdPolicy #9163\n- Support fixed retry #6419\n- Support \"retry-after-ms\" in response header #9240\n\n### Bug fixes\n\n- Removed `__enter__` and `__exit__` from async context managers #9313\n\n## 1.1.1 (2019-12-03)\n\n### Bug fixes\n\n- Bearer token authorization requires HTTPS\n- Rewind the body position in retry #8307\n\n## 1.1.0 (2019-11-25)\n\n### Features\n\n- New RequestIdPolicy #8437\n- Enable logging policy in default pipeline #8053\n- Normalize transport timeout. #8000\n Now we have:\n * 'connection_timeout' - a single float in seconds for the connection timeout. Default 5min\n * 'read_timeout' - a single float in seconds for the read timeout. Default 5min\n\n### Bug fixes\n\n- RequestHistory: deepcopy fails if request contains a stream #7732\n- Retry: retry raises error if response does not have http_response #8629\n- Client kwargs are now passed to DistributedTracingPolicy correctly #8051\n- NetworkLoggingPolicy now logs correctly all requests in case of retry #8262\n\n## 1.0.0 (2019-10-29)\n\n### Features\n\n- Tracing: DistributedTracingPolicy now accepts kwargs network_span_namer to change network span name #7773\n- Tracing: Implementation of AbstractSpan can now use the mixin HttpSpanMixin to get HTTP span update automatically #7773\n- Tracing: AbstractSpan contract \"change_context\" introduced #7773\n- Introduce new policy HttpLoggingPolicy #7988\n\n### Bug fixes\n\n- Fix AsyncioRequestsTransport if input stream is an async generator #7743\n- Fix form-data with aiohttp transport #7749\n\n### Breaking changes\n\n- Tracing: AbstractSpan.set_current_span is longer supported. Use change_context instead. 
#7773\n- azure.core.pipeline.policies.ContentDecodePolicy.deserialize_from_text changed\n\n## 1.0.0b4 (2019-10-07)\n\n### Features\n\n- Tracing: network span context is available with the TRACING_CONTEXT in pipeline response #7252\n- Tracing: Span contract now has `kind`, `traceparent` and is a context manager #7252\n- SansIOHTTPPolicy methods can now be coroutines #7497\n- Add multipart/mixed support #7083:\n\n - HttpRequest now has a \"set_multipart_mixed\" method to set the parts of this request\n - HttpRequest now has a \"prepare_multipart_body\" method to build final body.\n - HttpResponse now has a \"parts\" method to return an iterator of parts\n - AsyncHttpResponse now has a \"parts\" methods to return an async iterator of parts\n - Note that multipart/mixed is a Python 3.x only feature\n\n### Bug fixes\n\n- Tracing: policy cannot fail the pipeline, even in the worst condition #7252\n- Tracing: policy pass correctly status message if exception #7252\n- Tracing: incorrect span if exception raised from decorated function #7133\n- Fixed urllib3 ConnectTimeoutError being raised by Requests during a socket timeout. Now this exception is caught and wrapped as a `ServiceRequestError` #7542\n\n### Breaking changes\n\n- Tracing: `azure.core.tracing.context` removed\n- Tracing: `azure.core.tracing.context.tracing_context.with_current_context` renamed to `azure.core.tracing.common.with_current_context` #7252\n- Tracing: `link` renamed `link_from_headers` and `link` takes now a string\n- Tracing: opencensus implementation has been moved to the package `azure-core-tracing-opencensus`\n- Some modules and classes that were importables from several different places have been removed:\n\n - `azure.core.HttpResponseError` is now only `azure.core.exceptions.HttpResponseError`\n - `azure.core.Configuration` is now only `azure.core.configuration.Configuration`\n - `azure.core.HttpRequest` is now only `azure.core.pipeline.transport.HttpRequest`\n - `azure.core.version` module has been removed. Use `azure.core.__version__` to get version number.\n - `azure.core.pipeline_client` has been removed. Import from `azure.core` instead.\n - `azure.core.pipeline_client_async` has been removed. Import from `azure.core` instead.\n - `azure.core.pipeline.base` has been removed. Import from `azure.core.pipeline` instead.\n - `azure.core.pipeline.base_async` has been removed. Import from `azure.core.pipeline` instead.\n - `azure.core.pipeline.policies.base` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.base_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.authentication` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.authentication_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.custom_hook` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.redirect` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.redirect_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.retry` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.retry_async` has been removed. 
Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.distributed_tracing` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.universal` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.tracing.abstract_span` has been removed. Import from `azure.core.tracing` instead.\n - `azure.core.pipeline.transport.base` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.base_async` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_basic` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_asyncio` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_trio` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.aiohttp` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.polling.poller` has been removed. Import from `azure.core.polling` instead.\n - `azure.core.polling.async_poller` has been removed. Import from `azure.core.polling` instead.\n\n## 1.0.0b3 (2019-09-09)\n\n### Bug fixes\n\n- Fix aiohttp auto-headers #6992\n- Add tracing to policies module init #6951\n\n## 1.0.0b2 (2019-08-05)\n\n### Breaking changes\n\n- Transport classes don't take `config` parameter anymore (use kwargs instead) #6372\n- `azure.core.paging` has been completely refactored #6420\n- HttpResponse.content_type attribute is now a string (was a list) #6490\n- For `StreamDownloadGenerator` subclasses, `response` is now an `HttpResponse`, and not a transport response like `aiohttp.ClientResponse` or `requests.Response`. 
The transport response is available in `internal_response` attribute #6490\n\n### Bug fixes\n\n- aiohttp is not required to import async pipelines classes #6496\n- `AsyncioRequestsTransport.sleep` is now a coroutine as expected #6490\n- `RequestsTransport` is not tight to `ProxyPolicy` implementation details anymore #6372\n- `AiohttpTransport` does not raise on unexpected kwargs #6355\n\n### Features\n\n- New paging base classes that support `continuation_token` and `by_page()` #6420\n- Proxy support for `AiohttpTransport` #6372\n\n## 1.0.0b1 (2019-06-26)\n\n- Preview 1 release", - "release_date": "2025-07-03T00:55:25", + "description": "Microsoft Azure Core Library for Python\n# Azure Core shared client library for Python\n\nAzure core provides shared exceptions and modules for Python SDK client libraries.\nThese libraries follow the [Azure SDK Design Guidelines for Python](https://azure.github.io/azure-sdk/python/guidelines/index.html) .\n\nIf you are a client library developer, please reference [client library developer reference](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md) for more information.\n\n[Source code](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/)\n| [Package (Pypi)][package]\n| [Package (Conda)](https://anaconda.org/microsoft/azure-core/)\n| [API reference documentation](https://learn.microsoft.com/python/api/overview/azure/core-readme)\n\n## Getting started\n\nTypically, you will not need to install azure core;\nit will be installed when you install one of the client libraries using it.\nIn case you want to install it explicitly (to implement your own client library, for example),\nyou can find it [here](https://pypi.org/project/azure-core/).\n\n## Key concepts\n\n### Azure Core Library Exceptions\n\n#### AzureError\n\nAzureError is the base exception for all errors.\n\n```python\nclass AzureError(Exception):\n def __init__(self, message, *args, **kwargs):\n self.inner_exception = kwargs.get(\"error\")\n self.exc_type, self.exc_value, self.exc_traceback = sys.exc_info()\n self.exc_type = self.exc_type.__name__ if self.exc_type else type(self.inner_exception)\n self.exc_msg = \"{}, {}: {}\".format(message, self.exc_type, self.exc_value) # type: ignore\n self.message = str(message)\n self.continuation_token = kwargs.get(\"continuation_token\")\n super(AzureError, self).__init__(self.message, *args)\n```\n\n*message* is any message (str) to be associated with the exception.\n\n*args* are any additional args to be included with exception.\n\n*kwargs* are keyword arguments to include with the exception. Use the keyword *error* to pass in an internal exception and *continuation_token* for a token reference to continue an incomplete operation.\n\n**The following exceptions inherit from AzureError:**\n\n#### ServiceRequestError\n\nAn error occurred while attempt to make a request to the service. No request was sent.\n\n#### ServiceResponseError\n\nThe request was sent, but the client failed to understand the response.\nThe connection may have timed out. 
These errors can be retried for idempotent or safe operations.\n\n#### HttpResponseError\n\nA request was made, and a non-success status code was received from the service.\n\n```python\nclass HttpResponseError(AzureError):\n def __init__(self, message=None, response=None, **kwargs):\n self.reason = None\n self.response = response\n if response:\n self.reason = response.reason\n self.status_code = response.status_code\n self.error = self._parse_odata_body(ODataV4Format, response) # type: Optional[ODataV4Format]\n if self.error:\n message = str(self.error)\n else:\n message = message or \"Operation returned an invalid status '{}'\".format(\n self.reason\n )\n\n super(HttpResponseError, self).__init__(message=message, **kwargs)\n```\n\n*message* is the HTTP response error message (optional)\n\n*response* is the HTTP response (optional).\n\n*kwargs* are keyword arguments to include with the exception.\n\n**The following exceptions inherit from HttpResponseError:**\n\n#### DecodeError\n\nAn error raised during response de-serialization.\n\n#### IncompleteReadError\n\nAn error raised if peer closes the connection before we have received the complete message body.\n\n#### ResourceExistsError\n\nAn error response with status code 4xx. This will not be raised directly by the Azure core pipeline.\n\n#### ResourceNotFoundError\n\nAn error response, typically triggered by a 412 response (for update) or 404 (for get/post).\n\n#### ResourceModifiedError\n\nAn error response with status code 4xx, typically 412 Conflict. This will not be raised directly by the Azure core pipeline.\n\n#### ResourceNotModifiedError\n\nAn error response with status code 304. This will not be raised directly by the Azure core pipeline.\n\n#### ClientAuthenticationError\n\nAn error response with status code 4xx. This will not be raised directly by the Azure core pipeline.\n\n#### TooManyRedirectsError\n\nAn error raised when the maximum number of redirect attempts is reached. The maximum amount of redirects can be configured in the RedirectPolicy.\n\n```python\nclass TooManyRedirectsError(HttpResponseError):\n def __init__(self, history, *args, **kwargs):\n self.history = history\n message = \"Reached maximum redirect attempts.\"\n super(TooManyRedirectsError, self).__init__(message, *args, **kwargs)\n```\n\n*history* is used to document the requests/responses that resulted in redirected requests.\n\n*args* are any additional args to be included with exception.\n\n*kwargs* are keyword arguments to include with the exception.\n\n#### StreamConsumedError\n\nAn error thrown if you try to access the stream of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once\nthe response stream has been consumed.\n\n#### StreamClosedError\n\nAn error thrown if you try to access the stream of the `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once\nthe response stream has been closed.\n\n#### ResponseNotReadError\n\nAn error thrown if you try to access the `content` of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` before\nreading in the response's bytes first.\n\n### Configurations\n\nWhen calling the methods, some properties can be configured by passing in as kwargs arguments.\n\n| Parameters | Description |\n| --- | --- |\n| headers | The HTTP Request headers. |\n| request_id | The request id to be added into header. |\n| user_agent | If specified, this will be added in front of the user agent string. |\n| logging_enable| Use to enable per operation. Defaults to `False`. 
|\n| logger | If specified, it will be used to log information. |\n| response_encoding | The encoding to use if known for this service (will disable auto-detection). |\n| raw_request_hook | Callback function. Will be invoked on request. |\n| raw_response_hook | Callback function. Will be invoked on response. |\n| network_span_namer | A callable to customize the span name. |\n| tracing_attributes | Attributes to set on all created spans. |\n| permit_redirects | Whether the client allows redirects. Defaults to `True`. |\n| redirect_max | The maximum allowed redirects. Defaults to `30`. |\n| retry_total | Total number of retries to allow. Takes precedence over other counts. Default value is `10`. |\n| retry_connect | How many connection-related errors to retry on. These are errors raised before the request is sent to the remote server, which we assume has not triggered the server to process the request. Default value is `3`. |\n| retry_read | How many times to retry on read errors. These errors are raised after the request was sent to the server, so the request may have side-effects. Default value is `3`. |\n| retry_status | How many times to retry on bad status codes. Default value is `3`. |\n| retry_backoff_factor | A backoff factor to apply between attempts after the second try (most errors are resolved immediately by a second try without a delay). Retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))` seconds. If the backoff_factor is 0.1, then the retry will sleep for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is `0.8`. |\n| retry_backoff_max | The maximum back off time. Default value is `120` seconds (2 minutes). |\n| retry_mode | Fixed or exponential delay between attempts, default is `Exponential`. |\n| timeout | Timeout setting for the operation in seconds, default is `604800`s (7 days). |\n| connection_timeout | A single float in seconds for the connection timeout. Defaults to `300` seconds. |\n| read_timeout | A single float in seconds for the read timeout. Defaults to `300` seconds. |\n| connection_verify | SSL certificate verification. Enabled by default. Set to False to disable, alternatively can be set to the path to a CA_BUNDLE file or directory with certificates of trusted CAs. |\n| connection_cert | Client-side certificates. You can specify a local cert to use as client side certificate, as a single file (containing the private key and the certificate) or as a tuple of both files' paths. |\n| proxies | Dictionary mapping protocol or protocol and hostname to the URL of the proxy. |\n| cookies | Dict or CookieJar object to send with the `Request`. |\n| connection_data_block_size | The block size of data sent over the connection. Defaults to `4096` bytes. |\n\n### Async transport\n\nThe async transport is designed to be opt-in. [AioHttp](https://pypi.org/project/aiohttp/) is one of the supported implementations of async transport. It is not installed by default. You need to install it separately.\n\n### Shared modules\n\n#### MatchConditions\n\nMatchConditions is an enum to describe match conditions.\n\n```python\nclass MatchConditions(Enum):\n Unconditionally = 1 # Matches any condition\n IfNotModified = 2 # If the target object is not modified. Usually it maps to etag=\n IfModified = 3 # Only if the target object is modified. Usually it maps to etag!=\n IfPresent = 4 # If the target object exists. Usually it maps to etag='*'\n IfMissing = 5 # If the target object does not exist. 
Usually it maps to etag!='*'\n```\n\n#### CaseInsensitiveEnumMeta\n\nA metaclass to support case-insensitive enums.\n\n```python\nfrom enum import Enum\n\nfrom azure.core import CaseInsensitiveEnumMeta\n\nclass MyCustomEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):\n FOO = 'foo'\n BAR = 'bar'\n```\n\n#### Null Sentinel Value\n\nA falsy sentinel object which is supposed to be used to specify attributes\nwith no data. This gets serialized to `null` on the wire.\n\n```python\nfrom azure.core.serialization import NULL\n\nassert bool(NULL) is False\n\nfoo = Foo(\n attr=NULL\n)\n```\n\n## Logging\n\nAzure libraries follow the guidance of Python's standard [logging](https://docs.python.org/3/library/logging.html) module. By following the Python documentation on logging, you should be able to configure logging for Azure libraries effectively.\n\nAzure library loggers use a dot-based separated syntax, where the first section is always `azure`, followed by the package name. For example, the Azure Core library uses logger names that start with `azure.core`.\n\nHere's an example of how to configure logging for Azure libraries:\n\n```python\nimport logging\nimport sys\n\n# Enable detailed console logs across Azure libraries\nazure_logger = logging.getLogger(\"azure\")\nazure_logger.setLevel(logging.DEBUG)\nazure_logger.addHandler(logging.StreamHandler(stream=sys.stdout))\n\n# Exclude detailed logs for network calls associated with getting Entra ID token.\nidentity_logger = logging.getLogger(\"azure.identity\")\nidentity_logger.setLevel(logging.ERROR)\n\n# Make sure regular (redacted) detailed azure.core logs are not shown, as we are about to\n# turn on non-redacted logs by passing 'logging_enable=True' to the client constructor \nlogger = logging.getLogger(\"azure.core.pipeline.policies.http_logging_policy\")\nlogger.setLevel(logging.ERROR)\n```\n\n## Contributing\n\nThis project welcomes contributions and suggestions. Most contributions require\nyou to agree to a Contributor License Agreement (CLA) declaring that you have\nthe right to, and actually do, grant us the rights to use your contribution.\nFor details, visit [https://cla.microsoft.com](https://cla.microsoft.com).\n\nWhen you submit a pull request, a CLA-bot will automatically determine whether\nyou need to provide a CLA and decorate the PR appropriately (e.g., label,\ncomment). Simply follow the instructions provided by the bot. You will only\nneed to do this once across all repos using our CLA.\n\nThis project has adopted the\n[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).\nFor more information, see the\n[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)\nor contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any\nadditional questions or comments.\n\n\n[package]: https://pypi.org/project/azure-core/\n\n\n# Release History\n\n## 1.35.1 (2025-09-11)\n\n### Bugs Fixed\n\n- Fixed an issue where the `retry_backoff_max` parameter in `RetryPolicy` and `AsyncRetryPolicy` constructors was being ignored, causing retry operations to use default maximum backoff values instead of the user-specified limits. #42444\n\n### Other Changes\n\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` will now properly surface credential exceptions when handling claims challenges. 
Previously, exceptions from credential token requests were suppressed; now they are raised and chained with the original 401 `HttpResponseError` response for better debugging visibility. #42536\n\n## 1.35.0 (2025-07-02)\n\n### Features Added\n\n- Added a `start_time` keyword argument to the `start_span` and `start_as_current_span` methods in the `OpenTelemetryTracer` class. This allows users to specify a custom start time for created spans. #41106\n- Added a `context` keyword argument to the `start_span` and `start_as_current_span` methods in the `OpenTelemetryTracer` class. This allows users to specify a custom parent context for created spans. #41511\n- Added method `as_attribute_dict` to `azure.core.serialization` for backcompat migration purposes. Will return a generated model as a dictionary where the keys are in attribute syntax.\n- Added `is_generated_model` method to `azure.core.serialization`. Returns whether a given input is a model from one of our generated sdks. #41445\n- Added `attribute_list` method to `azure.core.serialization`. Returns all of the attributes of a given model from one of our generated sdks. #41571\n\n### Other Changes\n\n- A timeout error when using the `aiohttp` transport (the default for async SDKs) will now be raised as a `azure.core.exceptions.ServiceResponseTimeoutError`, a subtype of the previously raised `ServiceResponseError`.\n- When using with `aiohttp` 3.10 or later, a connection timeout error will now be raised as a `azure.core.exceptions.ServiceRequestTimeoutError`, which can be retried.\n- The default implementation of `on_challenge` in `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` will now cache the retrieved token. #41857\n\n## 1.34.0 (2025-05-01)\n\n### Features Added\n\n- Added a `set_span_error_status` method to the `OpenTelemetryTracer` class. This method allows users to set the status of a span to `ERROR` after it has been created. #40703\n\n### Other Changes\n\n- Python 3.8 is no longer supported. Please use Python version 3.9 or later.\n\n## 1.33.0 (2025-04-03)\n\n### Features Added\n\n- Added native OpenTelemetry tracing to Azure Core which enables users to use OpenTelemetry to trace Azure SDK operations without needing to install a plugin. #39563\n - To enable native OpenTelemetry tracing, users need to:\n 1. Have `opentelemetry-api` installed.\n 2. Ensure that `settings.tracing_implementation` is not set.\n 3. Ensure that `settings.tracing_enabled` is set to `True`.\n - If `setting.tracing_implementation` is set, the tracing plugin will be used instead of the native tracing.\n - If `settings.tracing_enabled` is set to `False`, tracing will be disabled.\n - The `OpenTelemetryTracer` class was added to the `azure.core.tracing.opentelemetry` module. This is a wrapper around the OpenTelemetry tracer that is used to create spans for Azure SDK operations.\n - Added a `get_tracer` method to the new `azure.core.instrumentation` module. This method returns an instance of the `OpenTelemetryTracer` class if OpenTelemetry is available.\n - A `TracingOptions` TypedDict class was added to define the options that SDK users can use to configure tracing per-operation. 
These options include the ability to enable or disable tracing and set additional attributes on spans.\n - Example usage: `client.method(tracing_options={\"enabled\": True, \"attributes\": {\"foo\": \"bar\"}})`\n - The `DistributedTracingPolicy` and `distributed_trace`/`distributed_trace_async` decorators now uses the OpenTelemetry tracer if it is available and native tracing is enabled.\n - SDK clients can define an `_instrumentation_config` class variable to configure the OpenTelemetry tracer used in method span creation. Possible configuration options are `library_name`, `library_version`, `schema_url`, and `attributes`.\n - `DistributedTracingPolicy` now accepts a `instrumentation_config` keyword argument to configure the OpenTelemetry tracer used in HTTP span creation.\n\n### Breaking Changes\n\n- Removed automatic tracing enablement for the OpenTelemetry plugin if `opentelemetry` was imported. To enable tracing with the plugin, please import `azure.core.settings.settings` and set `settings.tracing_implementation` to `\"opentelemetry\"`. #39563\n- In `DistributedTracingPolicy`, the default span name is now just the HTTP method (e.g., \"GET\", \"POST\") and no longer includes the URL path. This change was made to converge with the OpenTelemetry HTTP semantic conventions. The full URL is still included in the span attributes.\n- Renamed span attributes in `DistributedTracingPolicy`:\n - \"x-ms-client-request-id\" is now \"az.client_request_id\"\n - \"x-ms-request-id\" is now \"az.service_request_id\"\n\n### Bugs Fixed\n\n- Fixed an issue where the `traceparent` header was not being set correctly in the `DistributedTracingPolicy`. The `traceparent` header will now set based on the context of the HTTP client span. #40074\n\n### Other Changes\n\n- Added `opentelemetry-api` as an optional dependency for tracing. This can be installed with `pip install azure-core[tracing]`. #39563\n\n## 1.32.0 (2024-10-31)\n\n### Features Added\n\n- Added a default implementation to handle token challenges in `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy`.\n\n### Bugs Fixed\n\n- Fixed an issue where the `tracing_attributes` keyword argument wasn't being handled at the request/method level. #38164\n\n### Other Changes\n\n- Log \"x-vss-e2eid\" and \"x-msedge-ref\" headers in `HttpLoggingPolicy`.\n\n## 1.31.0 (2024-09-12)\n\n### Features Added\n\n- Added azure.core.AzureClouds enum to represent the different Azure clouds.\n- Added two new credential protocol classes, `SupportsTokenInfo` and `AsyncSupportsTokenInfo`, to offer more extensibility in supporting various token acquisition scenarios. #36565\n - Each new protocol class defines a `get_token_info` method that returns an `AccessTokenInfo` object.\n- Added a new `TokenRequestOptions` class, which is a `TypedDict` with optional parameters, that can be used to define options for token requests through the `get_token_info` method. #36565\n- Added a new `AccessTokenInfo` class, which is returned by `get_token_info` implementations. This class contains the token, its expiration time, and optional additional information like when a token should be refreshed. #36565\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` now first check if a credential has the `get_token_info` method defined. If so, the `get_token_info` method is used to acquire a token. Otherwise, the `get_token` method is used. 
#36565\n - These policies now also check the `refresh_on` attribute when determining if a new token request should be made.\n\n### Other Changes\n\n- The Azure Core OpenTelemetry tracing plugin will now be the preferred tracing plugin over the OpenCensus plugin. If both plugins are installed and `opentelemetry` is imported, then OpenTelemetry will be used to trace Azure SDK operations. #35050\n\n## 1.30.2 (2024-06-06)\n\n### Features Added\n\n- Tracing: `DistributedTracingPolicy` will now set an attribute, `http.request.resend_count`, on HTTP spans for resent requests to indicate the resend attempt number. #35069\n\n### Bugs Fixed\n\n- Raise correct exception if transport is used while already closed #35559\n\n### Other Changes\n\n- HTTP tracing spans will now include an `error.type` attribute if an error status code is returned. #34619\n- Minimum required Python version is now 3.8\n\n## 1.30.1 (2024-02-29)\n\n### Other Changes\n\n- Accept float for `retry_after` header. #34203\n\n## 1.30.0 (2024-02-01)\n\n### Features Added\n\n- Support tuple input for file values to `azure.core.rest.HttpRequest` #33948\n- Support tuple input to `files` with duplicate field names `azure.core.rest.HttpRequest` #34021\n\n## 1.29.7 (2024-01-18)\n\n### Other Changes\n\n- Removed dependency on `anyio`. #33282\n\n## 1.29.6 (2023-12-14)\n\n### Bugs Fixed\n\n- Adjusted `AsyncBearerTokenCredentialPolicy` to work properly with `trio` concurrency mechanisms. ([#33307](https://github.com/Azure/azure-sdk-for-python/pull/33307))\n\n### Other Changes\n\n- Added dependency on `anyio` >=3.0,<5.0\n- Bumped minimum dependency on `requests` to 2.21.0.\n\n## 1.29.5 (2023-10-19)\n\n### Bugs Fixed\n\n- Fixed an issue with `multipart/form-data` in the async transport where `data` was not getting encoded into the request body. #32473\n\n### Other Changes\n\n- Use ssl context from aiohttp by default.\n\n## 1.29.4 (2023-09-07)\n\n### Bugs Fixed\n\n- Fixed the issue that some urls trigger an infinite loop. #31346\n- Fixed issue where IndexError was raised if multipart responses did not match the number of requests. #31471\n- Fixed issue unbound variable exception if dict is invalid in CloudEvent.from_dict. #31835\n- Fixed issue asyncBearerTokenCredentialPolicy is not backward compatible with SansIOHTTPPolicy. #31836\n- Fixed issue mypy complains with new version of azure-core. #31564\n\n## 1.29.3 (2023-08-22)\n\n### Bugs Fixed\n\n- Typing fix: `message` cannot be `None` in `AzureError`. #31564\n\n## 1.29.2 (2023-08-14)\n\n### Bugs Fixed\n\n- Added a default implementation for `AsyncTokenCredential.__aexit__()` #31573\n\n### Other Changes\n\n- Bumped `typing-extensions` version to 4.6.0.\n\n## 1.29.1 (2023-08-09)\n\n### Bugs Fixed\n\n- Not pass `enabled_cae` unless it is explicitly enabled.\n\n## 1.29.0 (2023-08-03)\n\n### Features Added\n\n- A keyword argument `enable_cae` was added to the `get_token` method of the `TokenCredential` protocol. #31012\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` now accept `enable_cae` keyword arguments in their constructors. This is used in determining if [Continuous Access Evaluation (CAE)](https://learn.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation) should be enabled for each `get_token` request. #31012\n\n## 1.28.0 (2023-07-06)\n\n### Features Added\n\n- Added header name parameter to `RequestIdPolicy`. 
#30772\n- Added `SensitiveHeaderCleanupPolicy` that cleans up sensitive headers if a redirect happens and the new destination is in another domain. #28349\n\n### Other Changes\n\n- Catch aiohttp errors and translate them into azure-core errors.\n\n## 1.27.1 (2023-06-13)\n\n### Bugs Fixed\n\n- Fix url building for some complex query parameters scenarios #30707\n\n## 1.27.0 (2023-06-01)\n\n### Features Added\n\n- Added support to use sync credentials in `AsyncBearerTokenCredentialPolicy`. #30381\n- Added \"prefix\" parameter to AzureKeyCredentialPolicy #29901\n\n### Bugs Fixed\n\n- Improve error message when providing the wrong credential type for AzureKeyCredential #30380\n\n## 1.26.4 (2023-04-06)\n\n### Features Added\n\n- Updated settings to include OpenTelemetry as a tracer provider. #29095\n\n### Other Changes\n\n- Improved typing\n\n## 1.26.3 (2023-02-02)\n\n### Bugs Fixed\n\n- Fixed deflate decompression for aiohttp #28483\n\n## 1.26.2 (2023-01-05)\n\n### Bugs Fixed\n\n- Fix 'ClientSession' object has no attribute 'auto_decompress' (thanks to @mghextreme for the contribution)\n\n### Other Changes\n\n- Add \"x-ms-error-code\" as secure header to log\n- Rename \"DEFAULT_HEADERS_WHITELIST\" to \"DEFAULT_HEADERS_ALLOWLIST\". Added a backward compatible alias.\n\n## 1.26.1 (2022-11-03)\n\n### Other Changes\n\n- Added example of RequestsTransport with custom session. (thanks to @inirudebwoy for the contribution) #26768\n- Added Python 3.11 support.\n\n## 1.26.0 (2022-10-06)\n\n### Other Changes\n\n- LRO polling will not wait anymore before doing the first status check #26376\n- Added extra dependency for [aio]. pip install azure-core[aio] installs aiohttp too.\n\n## 1.25.1 (2022-09-01)\n\n### Bugs Fixed\n\n- Added @runtime_checkable to `TokenCredential` protocol definitions #25187\n\n## 1.25.0 (2022-08-04)\n\nAzure-core is supported on Python 3.7 or later. For more details, please read our page on [Azure SDK for Python version support policy](https://github.com/Azure/azure-sdk-for-python/wiki/Azure-SDKs-Python-version-support-policy).\n\n### Features Added\n\n- Added `CaseInsensitiveDict` implementation in `azure.core.utils` removing dependency on `requests` and `aiohttp`\n\n## 1.24.2 (2022-06-30)\n\n### Bugs Fixed\n\n- Fixed the bug that azure-core could not be imported under Python 3.11.0b3 #24928\n- `ContentDecodePolicy` can now correctly deserialize more JSON bodies with different mime types #22410\n\n## 1.24.1 (2022-06-01)\n\n### Bugs Fixed\n\n- Declare method level span as INTERNAL by default #24492\n- Fixed type hints for `azure.core.paging.ItemPaged` #24548\n\n## 1.24.0 (2022-05-06)\n\n### Features Added\n\n- Add `SerializationError` and `DeserializationError` in `azure.core.exceptions` for errors raised during serialization / deserialization #24312\n\n## 1.23.1 (2022-03-31)\n\n### Bugs Fixed\n\n- Allow stream inputs to the `content` kwarg of `azure.core.rest.HttpRequest` from objects with a `read` method #23578\n\n## 1.23.0 (2022-03-03)\n\n### Features Added\n\n- Improve intellisense type hinting for service client methods. #22891\n\n- Add a case insensitive dict `case_insensitive_dict` in `azure.core.utils`. #23206\n\n### Bugs Fixed\n\n- Use \"\\n\" rather than \"/n\" for new line in log. 
#23261\n\n### Other Changes\n\n- Log \"WWW-Authenticate\" header in `HttpLoggingPolicy` #22990\n- Added dependency on `typing-extensions` >= 4.0.1\n\n## 1.22.1 (2022-02-09)\n\n### Bugs Fixed\n\n- Limiting `final-state-via` scope to POST until consuming SDKs has been fixed to use this option properly on PUT. #22989\n\n## 1.22.0 (2022-02-03)\n_[**This version is deprecated.**]_\n\n### Features Added\n\n- Add support for `final-state-via` LRO option in core. #22713\n\n### Bugs Fixed\n\n- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #22302\n- Raise `AttributeError` when calling azure.core.pipeline.transport.\\_\\_bases__ #22469\n\n### Other Changes\n\n- Python 2.7 is no longer supported. Please use Python version 3.6 or later.\n\n## 1.21.1 (2021-12-06)\n\n### Other Changes\n\n- Revert change in str method #22023\n\n## 1.21.0 (2021-12-02)\n\n### Breaking Changes\n\n- Sync stream downloading now raises `azure.core.exceptions.DecodeError` rather than `requests.exceptions.ContentDecodingError`\n\n### Bugs Fixed\n\n- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #21800\n\n## 1.20.1 (2021-11-08)\n\n### Bugs Fixed\n\n- Correctly set response's content to decompressed body when users are using aiohttp transport with decompression headers #21620\n\n## 1.20.0 (2021-11-04)\n\n### Features Added\n\n- GA `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in\nrequests and sends them through our pipelines.\n- GA `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.\n- GA errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. 
These errors\nare thrown if you mishandle streamed responses from the `azure.core.rest` module\n- add kwargs to the methods for `iter_raw` and `iter_bytes` #21529\n- no longer raise JSON errors if users pass in file descriptors of JSON to the `json` kwarg in `HttpRequest` #21504\n- Added new error type `IncompleteReadError` which is raised if peer closes the connection before we have received the complete message body.\n\n### Breaking Changes\n\n- SansIOHTTPPolicy.on_exception returns None instead of bool.\n\n### Bugs Fixed\n\n- The `Content-Length` header in a http response is strictly checked against the actual number of bytes in the body,\n rather than silently truncating data in case the underlying tcp connection is closed prematurely.\n (thanks to @jochen-ott-by for the contribution) #20412\n- UnboundLocalError when SansIOHTTPPolicy handles an exception #15222\n- Add default content type header of `text/plain` and content length header for users who pass unicode strings to the `content` kwarg of `HttpRequest` in 2.7 #21550\n\n## 1.19.1 (2021-11-01)\n\n### Bugs Fixed\n\n- respect text encoding specified in argument (thanks to @ryohji for the contribution) #20796\n- Fix \"coroutine x.read() was never awaited\" warning from `ContentDecodePolicy` #21318\n- fix type check for `data` input to `azure.core.rest` for python 2.7 users #21341\n- use `charset_normalizer` if `chardet` is not installed to migrate aiohttp 3.8.0 changes.\n\n### Other Changes\n\n- Refactor AzureJSONEncoder (thanks to @Codejune for the contribution) #21028\n\n## 1.19.0 (2021-09-30)\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` are now abstract base classes. They should not be initialized directly, instead\nyour transport responses should inherit from them and implement them.\n- The properties of the `azure.core.rest` responses are now all read-only\n\n- HttpLoggingPolicy integrates logs into one record #19925\n\n## 1.18.0 (2021-09-02)\n\n### Features Added\n\n- `azure.core.serialization.AzureJSONEncoder` (introduced in 1.17.0) serializes `datetime.datetime` objects in ISO 8601 format, conforming to RFC 3339's specification. #20190\n- We now use `azure.core.serialization.AzureJSONEncoder` to serialize `json` input to `azure.core.rest.HttpRequest`.\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- The `text` property on `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` has changed to a method, which also takes\nan `encoding` parameter.\n- Removed `iter_text` and `iter_lines` from `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse`\n\n### Bugs Fixed\n\n- The behaviour of the headers returned in `azure.core.rest` responses now aligns across sync and async. Items can now be checked case-insensitively and without raising an error for format.\n\n## 1.17.0 (2021-08-05)\n\n### Features Added\n\n- Cut hard dependency on requests library\n- Added a `from_json` method which now accepts storage QueueMessage, eventhub's EventData or ServiceBusMessage or simply json bytes to return a `CloudEvent`\n\n### Fixed\n\n- Not override \"x-ms-client-request-id\" if it already exists in the header. #17757\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- `azure.core.rest` will not try to guess the `charset` anymore if it was impossible to extract it from `HttpResponse` analysis. 
This removes our dependency on `charset`.\n\n## 1.16.0 (2021-07-01)\n\n### Features Added\n\n- Add new ***provisional*** methods `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in\nrequests and sends them through our pipelines.\n- Add new ***provisional*** module `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.\n- Add new ***provisional*** errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. These errors\nare thrown if you mishandle streamed responses from the provisional `azure.core.rest` module\n\n### Fixed\n\n- Improved error message in the `from_dict` method of `CloudEvent` when a wrong schema is sent.\n\n## 1.15.0 (2021-06-04)\n\n### New Features\n\n- Added `BearerTokenCredentialPolicy.on_challenge` and `.authorize_request` to allow subclasses to optionally handle authentication challenges\n\n### Bug Fixes\n\n- Retry policies don't sleep after operations time out\n- The `from_dict` methhod in the `CloudEvent` can now convert a datetime string to datetime object when microsecond exceeds the python limitation\n\n## 1.14.0 (2021-05-13)\n\n### New Features\n\n- Added `azure.core.credentials.AzureNamedKeyCredential` credential #17548.\n- Added `decompress` parameter for `stream_download` method. If it is set to `False`, will not do decompression upon the stream. #17920\n\n## 1.13.0 (2021-04-02)\n\nAzure core requires Python 2.7 or Python 3.6+ since this release.\n\n### New Features\n\n- Added `azure.core.utils.parse_connection_string` function to parse connection strings across SDKs, with common validation and support for case insensitive keys.\n- Supported adding custom policies #16519\n- Added `~azure.core.tracing.Link` that should be used while passing `Links` to `AbstractSpan`.\n- `AbstractSpan` constructor can now take in additional keyword only args.\n\n### Bug fixes\n\n- Make NetworkTraceLoggingPolicy show the auth token in plain text. #14191\n- Fixed RetryPolicy overriding default connection timeout with an extreme value #17481\n\n## 1.12.0 (2021-03-08)\n\nThis version will be the last version to officially support Python 3.5, future versions will require Python 2.7 or Python 3.6+.\n\n### Features\n\n- Added `azure.core.messaging.CloudEvent` model that follows the cloud event spec.\n- Added `azure.core.serialization.NULL` sentinel value\n- Improve `repr`s for `HttpRequest` and `HttpResponse`s #16972\n\n### Bug Fixes\n\n- Disable retry in stream downloading. (thanks to @jochen-ott-by @hoffmann for the contribution) #16723\n\n## 1.11.0 (2021-02-08)\n\n### Features\n\n- Added `CaseInsensitiveEnumMeta` class for case-insensitive enums. #16316\n- Add `raise_for_status` method onto `HttpResponse`. Calling `response.raise_for_status()` on a response with an error code\nwill raise an `HttpResponseError`. Calling it on a good response will do nothing #16399\n\n### Bug Fixes\n\n- Update conn.conn_kw rather than overriding it when setting block size. (thanks for @jiasli for the contribution) #16587\n\n## 1.10.0 (2021-01-11)\n\n### Features\n\n- Added `AzureSasCredential` and its respective policy. 
#15946\n\n## 1.9.0 (2020-11-09)\n\n### Features\n\n- Add a `continuation_token` attribute to the base `AzureError` exception, and set this value for errors raised\n during paged or long-running operations.\n\n### Bug Fixes\n\n- Set retry_interval to 1 second instead of 1000 seconds (thanks **vbarbaresi** for contributing) #14357\n\n\n## 1.8.2 (2020-10-05)\n\n### Bug Fixes\n\n- Fixed bug to allow polling in the case of parameterized endpoints with relative polling urls #14097\n\n\n## 1.8.1 (2020-09-08)\n\n### Bug fixes\n\n- SAS credential replicated \"/\" fix #13159\n\n## 1.8.0 (2020-08-10)\n\n### Features\n\n- Support params as list for exploding parameters #12410\n\n\n## 1.7.0 (2020-07-06)\n\n### Bug fixes\n\n- `AzureKeyCredentialPolicy` will now accept (and ignore) passed in kwargs #11963\n- Better error messages if passed endpoint is incorrect #12106\n- Do not JSON encore a string if content type is \"text\" #12137\n\n### Features\n\n- Added `http_logging_policy` property on the `Configuration` object, allowing users to individually\nset the http logging policy of the config #12218\n\n## 1.6.0 (2020-06-03)\n\n### Bug fixes\n\n- Fixed deadlocks in AsyncBearerTokenCredentialPolicy #11543\n- Fix AttributeException in StreamDownloadGenerator #11462\n\n### Features\n\n- Added support for changesets as part of multipart message support #10485\n- Add AsyncLROPoller in azure.core.polling #10801\n- Add get_continuation_token/from_continuation_token/polling_method methods in pollers (sync and async) #10801\n- HttpResponse and PipelineContext objects are now pickable #10801\n\n## 1.5.0 (2020-05-04)\n\n### Features\n\n- Support \"x-ms-retry-after-ms\" in response header #10743\n- `link` and `link_from_headers` now accepts attributes #10765\n\n### Bug fixes\n\n- Not retry if the status code is less than 400 #10778\n- \"x-ms-request-id\" is not considered safe header for logging #10967\n\n## 1.4.0 (2020-04-06)\n\n### Features\n\n- Support a default error type in map_error #9773\n- Added `AzureKeyCredential` and its respective policy. 
#10509\n- Added `azure.core.polling.base_polling` module with a \"Microsoft One API\" polling implementation #10090\n Also contains the async version in `azure.core.polling.async_base_polling`\n- Support kwarg `enforce_https` to disable HTTPS check on authentication #9821\n- Support additional kwargs in `HttpRequest.set_multipart_mixed` that will be passed into pipeline context.\n\n## 1.3.0 (2020-03-09)\n\n### Bug fixes\n\n- Appended RequestIdPolicy to the default pipeline #9841\n- Rewind the body position in async_retry #10117\n\n### Features\n\n- Add raw_request_hook support in custom_hook_policy #9958\n- Add timeout support in retry_policy #10011\n- Add OdataV4 error format auto-parsing in all exceptions ('error' attribute) #9738\n\n## 1.2.2 (2020-02-10)\n\n### Bug fixes\n\n- Fixed a bug that sends None as request_id #9545\n- Enable mypy for customers #9572\n- Handle TypeError in deep copy #9620\n- Fix text/plain content-type in decoder #9589\n\n## 1.2.1 (2020-01-14)\n\n### Bug fixes\n\n- Fixed a regression in 1.2.0 that was incompatible with azure-keyvault-* 4.0.0\n[#9462](https://github.com/Azure/azure-sdk-for-python/issues/9462)\n\n\n## 1.2.0 (2020-01-14)\n\n### Features\n\n- Add user_agent & sdk_moniker kwargs in UserAgentPolicy init #9355\n- Support OPTIONS HTTP verb #9322\n- Add tracing_attributes to tracing decorator #9297\n- Support auto_request_id in RequestIdPolicy #9163\n- Support fixed retry #6419\n- Support \"retry-after-ms\" in response header #9240\n\n### Bug fixes\n\n- Removed `__enter__` and `__exit__` from async context managers #9313\n\n## 1.1.1 (2019-12-03)\n\n### Bug fixes\n\n- Bearer token authorization requires HTTPS\n- Rewind the body position in retry #8307\n\n## 1.1.0 (2019-11-25)\n\n### Features\n\n- New RequestIdPolicy #8437\n- Enable logging policy in default pipeline #8053\n- Normalize transport timeout. #8000\n Now we have:\n * 'connection_timeout' - a single float in seconds for the connection timeout. Default 5min\n * 'read_timeout' - a single float in seconds for the read timeout. Default 5min\n\n### Bug fixes\n\n- RequestHistory: deepcopy fails if request contains a stream #7732\n- Retry: retry raises error if response does not have http_response #8629\n- Client kwargs are now passed to DistributedTracingPolicy correctly #8051\n- NetworkLoggingPolicy now logs correctly all requests in case of retry #8262\n\n## 1.0.0 (2019-10-29)\n\n### Features\n\n- Tracing: DistributedTracingPolicy now accepts kwargs network_span_namer to change network span name #7773\n- Tracing: Implementation of AbstractSpan can now use the mixin HttpSpanMixin to get HTTP span update automatically #7773\n- Tracing: AbstractSpan contract \"change_context\" introduced #7773\n- Introduce new policy HttpLoggingPolicy #7988\n\n### Bug fixes\n\n- Fix AsyncioRequestsTransport if input stream is an async generator #7743\n- Fix form-data with aiohttp transport #7749\n\n### Breaking changes\n\n- Tracing: AbstractSpan.set_current_span is longer supported. Use change_context instead. 
#7773\n- azure.core.pipeline.policies.ContentDecodePolicy.deserialize_from_text changed\n\n## 1.0.0b4 (2019-10-07)\n\n### Features\n\n- Tracing: network span context is available with the TRACING_CONTEXT in pipeline response #7252\n- Tracing: Span contract now has `kind`, `traceparent` and is a context manager #7252\n- SansIOHTTPPolicy methods can now be coroutines #7497\n- Add multipart/mixed support #7083:\n\n - HttpRequest now has a \"set_multipart_mixed\" method to set the parts of this request\n - HttpRequest now has a \"prepare_multipart_body\" method to build final body.\n - HttpResponse now has a \"parts\" method to return an iterator of parts\n - AsyncHttpResponse now has a \"parts\" methods to return an async iterator of parts\n - Note that multipart/mixed is a Python 3.x only feature\n\n### Bug fixes\n\n- Tracing: policy cannot fail the pipeline, even in the worst condition #7252\n- Tracing: policy pass correctly status message if exception #7252\n- Tracing: incorrect span if exception raised from decorated function #7133\n- Fixed urllib3 ConnectTimeoutError being raised by Requests during a socket timeout. Now this exception is caught and wrapped as a `ServiceRequestError` #7542\n\n### Breaking changes\n\n- Tracing: `azure.core.tracing.context` removed\n- Tracing: `azure.core.tracing.context.tracing_context.with_current_context` renamed to `azure.core.tracing.common.with_current_context` #7252\n- Tracing: `link` renamed `link_from_headers` and `link` takes now a string\n- Tracing: opencensus implementation has been moved to the package `azure-core-tracing-opencensus`\n- Some modules and classes that were importables from several different places have been removed:\n\n - `azure.core.HttpResponseError` is now only `azure.core.exceptions.HttpResponseError`\n - `azure.core.Configuration` is now only `azure.core.configuration.Configuration`\n - `azure.core.HttpRequest` is now only `azure.core.pipeline.transport.HttpRequest`\n - `azure.core.version` module has been removed. Use `azure.core.__version__` to get version number.\n - `azure.core.pipeline_client` has been removed. Import from `azure.core` instead.\n - `azure.core.pipeline_client_async` has been removed. Import from `azure.core` instead.\n - `azure.core.pipeline.base` has been removed. Import from `azure.core.pipeline` instead.\n - `azure.core.pipeline.base_async` has been removed. Import from `azure.core.pipeline` instead.\n - `azure.core.pipeline.policies.base` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.base_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.authentication` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.authentication_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.custom_hook` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.redirect` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.redirect_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.retry` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.retry_async` has been removed. 
Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.distributed_tracing` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.universal` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.tracing.abstract_span` has been removed. Import from `azure.core.tracing` instead.\n - `azure.core.pipeline.transport.base` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.base_async` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_basic` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_asyncio` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_trio` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.aiohttp` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.polling.poller` has been removed. Import from `azure.core.polling` instead.\n - `azure.core.polling.async_poller` has been removed. Import from `azure.core.polling` instead.\n\n## 1.0.0b3 (2019-09-09)\n\n### Bug fixes\n\n- Fix aiohttp auto-headers #6992\n- Add tracing to policies module init #6951\n\n## 1.0.0b2 (2019-08-05)\n\n### Breaking changes\n\n- Transport classes don't take `config` parameter anymore (use kwargs instead) #6372\n- `azure.core.paging` has been completely refactored #6420\n- HttpResponse.content_type attribute is now a string (was a list) #6490\n- For `StreamDownloadGenerator` subclasses, `response` is now an `HttpResponse`, and not a transport response like `aiohttp.ClientResponse` or `requests.Response`. 
The transport response is available in `internal_response` attribute #6490\n\n### Bug fixes\n\n- aiohttp is not required to import async pipelines classes #6496\n- `AsyncioRequestsTransport.sleep` is now a coroutine as expected #6490\n- `RequestsTransport` is not tight to `ProxyPolicy` implementation details anymore #6372\n- `AiohttpTransport` does not raise on unexpected kwargs #6355\n\n### Features\n\n- New paging base classes that support `continuation_token` and `by_page()` #6420\n- Proxy support for `AiohttpTransport` #6372\n\n## 1.0.0b1 (2019-06-26)\n\n- Preview 1 release", + "release_date": "2025-09-11T22:58:06", "parties": [ { "type": "person", @@ -155,11 +155,11 @@ "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/core/azure-core", - "download_url": "https://files.pythonhosted.org/packages/d4/78/bf94897361fdd650850f0f2e405b2293e2f12808239046232bdedf554301/azure_core-1.35.0-py3-none-any.whl", - "size": 210708, + "download_url": "https://files.pythonhosted.org/packages/27/52/805980aa1ba18282077c484dba634ef0ede1e84eec8be9c92b2e162d0ed6/azure_core-1.35.1-py3-none-any.whl", + "size": 211800, "sha1": null, - "md5": "a5bb28aab86f7accdd9c7c36533d6a2d", - "sha256": "8db78c72868a58f3de8991eb4d22c4d368fae226dac1002998d6c50437e7dad1", + "md5": "ece20e5c0b954f1f10defd30a0cc86a0", + "sha256": "12da0c9e08e48e198f9158b56ddbe33b421477e1dc98c2e1c8f9e254d92c468b", "sha512": null, "bug_tracking_url": null, "code_view_url": null, @@ -179,9 +179,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/azure-core/1.35.0/json", + "api_data_url": "https://pypi.org/pypi/azure-core/1.35.1/json", "datasource_id": null, - "purl": "pkg:pypi/azure-core@1.35.0" + "purl": "pkg:pypi/azure-core@1.35.1" }, { "type": "pypi", @@ -255,12 +255,12 @@ "type": "pypi", "namespace": null, "name": "azure-storage-blob", - "version": "12.25.1", + "version": "12.26.0", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "Microsoft Azure Blob Storage Client Library for Python\n# Azure Storage Blobs client library for Python\nAzure Blob storage is Microsoft's object storage solution for the cloud. Blob storage is optimized for storing massive amounts of unstructured data, such as text or binary data.\n\nBlob storage is ideal for:\n\n* Serving images or documents directly to a browser\n* Storing files for distributed access\n* Streaming video and audio\n* Storing data for backup and restore, disaster recovery, and archiving\n* Storing data for analysis by an on-premises or Azure-hosted service\n\n[Source code](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/azure/storage/blob)\n| [Package (PyPI)](https://pypi.org/project/azure-storage-blob/)\n| [Package (Conda)](https://anaconda.org/microsoft/azure-storage/)\n| [API reference documentation](https://aka.ms/azsdk-python-storage-blob-ref)\n| [Product documentation](https://learn.microsoft.com/azure/storage/)\n| [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples)\n\n\n## Getting started\n\n### Prerequisites\n* Python 3.8 or later is required to use this package. 
For more details, please read our page on [Azure SDK for Python version support policy](https://github.com/Azure/azure-sdk-for-python/wiki/Azure-SDKs-Python-version-support-policy).\n* You must have an [Azure subscription](https://azure.microsoft.com/free/) and an\n[Azure storage account](https://learn.microsoft.com/azure/storage/common/storage-account-overview) to use this package.\n\n### Install the package\nInstall the Azure Storage Blobs client library for Python with [pip](https://pypi.org/project/pip/):\n\n```bash\npip install azure-storage-blob\n```\n\n### Create a storage account\nIf you wish to create a new storage account, you can use the\n[Azure Portal](https://learn.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-portal),\n[Azure PowerShell](https://learn.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-powershell),\nor [Azure CLI](https://learn.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-cli):\n\n```bash\n# Create a new resource group to hold the storage account -\n# if using an existing resource group, skip this step\naz group create --name my-resource-group --location westus2\n\n# Create the storage account\naz storage account create -n my-storage-account-name -g my-resource-group\n```\n\n### Create the client\nThe Azure Storage Blobs client library for Python allows you to interact with three types of resources: the storage\naccount itself, blob storage containers, and blobs. Interaction with these resources starts with an instance of a\n[client](#clients). To create a client object, you will need the storage account's blob service account URL and a\ncredential that allows you to access the storage account:\n\n```python\nfrom azure.storage.blob import BlobServiceClient\n\nservice = BlobServiceClient(account_url=\"https://.blob.core.windows.net/\", credential=credential)\n```\n\n#### Looking up the account URL\nYou can find the storage account's blob service URL using the\n[Azure Portal](https://learn.microsoft.com/azure/storage/common/storage-account-overview#storage-account-endpoints),\n[Azure PowerShell](https://learn.microsoft.com/powershell/module/az.storage/get-azstorageaccount),\nor [Azure CLI](https://learn.microsoft.com/cli/azure/storage/account?view=azure-cli-latest#az-storage-account-show):\n\n```bash\n# Get the blob service account url for the storage account\naz storage account show -n my-storage-account-name -g my-resource-group --query \"primaryEndpoints.blob\"\n```\n\n#### Types of credentials\nThe `credential` parameter may be provided in a number of different forms, depending on the type of\n[authorization](https://learn.microsoft.com/azure/storage/common/storage-auth) you wish to use:\n1. 
To use an [Azure Active Directory (AAD) token credential](https://learn.microsoft.com/azure/storage/common/storage-auth-aad),\n provide an instance of the desired credential type obtained from the\n [azure-identity](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#credentials) library.\n For example, [DefaultAzureCredential](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#defaultazurecredential)\n can be used to authenticate the client.\n\n This requires some initial setup:\n * [Install azure-identity](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#install-the-package)\n * [Register a new AAD application](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) and give permissions to access Azure Storage\n * [Grant access](https://learn.microsoft.com/azure/storage/common/storage-auth-aad-rbac-portal) to Azure Blob data with RBAC in the Azure Portal\n * Set the values of the client ID, tenant ID, and client secret of the AAD application as environment variables:\n AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET\n\n Use the returned token credential to authenticate the client:\n ```python\n from azure.identity import DefaultAzureCredential\n from azure.storage.blob import BlobServiceClient\n token_credential = DefaultAzureCredential()\n\n blob_service_client = BlobServiceClient(\n account_url=\"https://.blob.core.windows.net\",\n credential=token_credential\n )\n ```\n\n2. To use a [shared access signature (SAS) token](https://learn.microsoft.com/azure/storage/common/storage-sas-overview),\n provide the token as a string. If your account URL includes the SAS token, omit the credential parameter.\n You can generate a SAS token from the Azure Portal under \"Shared access signature\" or use one of the `generate_sas()`\n functions to create a sas token for the storage account, container, or blob:\n\n ```python\n from datetime import datetime, timedelta\n from azure.storage.blob import BlobServiceClient, generate_account_sas, ResourceTypes, AccountSasPermissions\n\n sas_token = generate_account_sas(\n account_name=\"\",\n account_key=\"\",\n resource_types=ResourceTypes(service=True),\n permission=AccountSasPermissions(read=True),\n expiry=datetime.utcnow() + timedelta(hours=1)\n )\n\n blob_service_client = BlobServiceClient(account_url=\"https://.blob.core.windows.net\", credential=sas_token)\n ```\n\n3. To use a storage account [shared key](https://learn.microsoft.com/rest/api/storageservices/authenticate-with-shared-key/)\n (aka account key or access key), provide the key as a string. This can be found in the Azure Portal under the \"Access Keys\"\n section or by running the following Azure CLI command:\n\n ```az storage account keys list -g MyResourceGroup -n MyStorageAccount```\n\n Use the key as the credential parameter to authenticate the client:\n ```python\n from azure.storage.blob import BlobServiceClient\n service = BlobServiceClient(account_url=\"https://.blob.core.windows.net\", credential=\"\")\n ```\n \n If you are using **customized url** (which means the url is not in this format `.blob.core.windows.net`),\n please instantiate the client using the credential below:\n ```python\n from azure.storage.blob import BlobServiceClient\n service = BlobServiceClient(account_url=\"https://.blob.core.windows.net\", \n credential={\"account_name\": \"\", \"account_key\":\"\"})\n ```\n\n4. 
To use [anonymous public read access](https://learn.microsoft.com/azure/storage/blobs/storage-manage-access-to-resources),\n simply omit the credential parameter.\n\n#### Creating the client from a connection string\nDepending on your use case and authorization method, you may prefer to initialize a client instance with a storage\nconnection string instead of providing the account URL and credential separately. To do this, pass the storage\nconnection string to the client's `from_connection_string` class method:\n\n```python\nfrom azure.storage.blob import BlobServiceClient\n\nconnection_string = \"DefaultEndpointsProtocol=https;AccountName=xxxx;AccountKey=xxxx;EndpointSuffix=core.windows.net\"\nservice = BlobServiceClient.from_connection_string(conn_str=connection_string)\n```\n\nThe connection string to your storage account can be found in the Azure Portal under the \"Access Keys\" section or by running the following CLI command:\n\n```bash\naz storage account show-connection-string -g MyResourceGroup -n MyStorageAccount\n```\n\n## Key concepts\nThe following components make up the Azure Blob Service:\n* The storage account itself\n* A container within the storage account\n* A blob within a container\n\nThe Azure Storage Blobs client library for Python allows you to interact with each of these components through the\nuse of a dedicated client object.\n\n### Clients\nFour different clients are provided to interact with the various components of the Blob Service:\n1. [BlobServiceClient](https://aka.ms/azsdk-python-storage-blob-blobserviceclient) -\n this client represents interaction with the Azure storage account itself, and allows you to acquire preconfigured\n client instances to access the containers and blobs within. It provides operations to retrieve and configure the\n account properties as well as list, create, and delete containers within the account. To perform operations on a\n specific container or blob, retrieve a client using the `get_container_client` or `get_blob_client` methods.\n2. [ContainerClient](https://aka.ms/azsdk-python-storage-blob-containerclient) -\n this client represents interaction with a specific container (which need not exist yet), and allows you to acquire\n preconfigured client instances to access the blobs within. It provides operations to create, delete, or configure a\n container and includes operations to list, upload, and delete the blobs within it. To perform operations on a\n specific blob within the container, retrieve a client using the `get_blob_client` method.\n3. [BlobClient](https://aka.ms/azsdk-python-storage-blob-blobclient) -\n this client represents interaction with a specific blob (which need not exist yet). It provides operations to\n upload, download, delete, and create snapshots of a blob, as well as specific operations per blob type.\n4. [BlobLeaseClient](https://aka.ms/azsdk-python-storage-blob-blobleaseclient) -\n this client represents lease interactions with a `ContainerClient` or `BlobClient`. It provides operations to\n acquire, renew, release, change, and break a lease on a specified resource.\n\n### Async Clients \nThis library includes a complete async API supported on Python 3.5+. 
To use it, you must\nfirst install an async transport, such as [aiohttp](https://pypi.org/project/aiohttp/).\nSee\n[azure-core documentation](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md#transport)\nfor more information.\n\nAsync clients and credentials should be closed when they're no longer needed. These\nobjects are async context managers and define async `close` methods.\n\n### Blob Types\nOnce you've initialized a Client, you can choose from the different types of blobs:\n* [Block blobs](https://learn.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-block-blobs)\n store text and binary data, up to approximately 4.75 TiB. Block blobs are made up of blocks of data that can be\n managed individually\n* [Append blobs](https://learn.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-append-blobs)\n are made up of blocks like block blobs, but are optimized for append operations. Append blobs are ideal for scenarios\n such as logging data from virtual machines\n* [Page blobs](https://learn.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-page-blobs)\n store random access files up to 8 TiB in size. Page blobs store virtual hard drive (VHD) files and serve as disks for\n Azure virtual machines\n\n## Examples\nThe following sections provide several code snippets covering some of the most common Storage Blob tasks, including:\n\n* [Create a container](#create-a-container \"Create a container\")\n* [Uploading a blob](#uploading-a-blob \"Uploading a blob\")\n* [Downloading a blob](#downloading-a-blob \"Downloading a blob\")\n* [Enumerating blobs](#enumerating-blobs \"Enumerating blobs\")\n\nNote that a container must be created before to upload or download a blob.\n\n### Create a container\n\nCreate a container from where you can upload or download blobs.\n```python\nfrom azure.storage.blob import ContainerClient\n\ncontainer_client = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\ncontainer_client.create_container()\n```\n\nUse the async client to create a container\n\n```python\nfrom azure.storage.blob.aio import ContainerClient\n\ncontainer_client = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\nawait container_client.create_container()\n```\n\n### Uploading a blob\nUpload a blob to your container\n\n```python\nfrom azure.storage.blob import BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./SampleSource.txt\", \"rb\") as data:\n blob.upload_blob(data)\n```\n\nUse the async client to upload a blob\n\n```python\nfrom azure.storage.blob.aio import BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./SampleSource.txt\", \"rb\") as data:\n await blob.upload_blob(data)\n```\n\n### Downloading a blob\nDownload a blob from your container\n\n```python\nfrom azure.storage.blob import BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./BlockDestination.txt\", \"wb\") as my_blob:\n blob_data = blob.download_blob()\n blob_data.readinto(my_blob)\n```\n\nDownload a blob asynchronously\n\n```python\nfrom azure.storage.blob.aio import 
BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./BlockDestination.txt\", \"wb\") as my_blob:\n stream = await blob.download_blob()\n data = await stream.readall()\n my_blob.write(data)\n```\n\n### Enumerating blobs\nList the blobs in your container\n\n```python\nfrom azure.storage.blob import ContainerClient\n\ncontainer = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\nblob_list = container.list_blobs()\nfor blob in blob_list:\n print(blob.name + '\\n')\n```\n\nList the blobs asynchronously\n\n```python\nfrom azure.storage.blob.aio import ContainerClient\n\ncontainer = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\nblob_list = []\nasync for blob in container.list_blobs():\n blob_list.append(blob)\nprint(blob_list)\n```\n\n## Optional Configuration\n\nOptional keyword arguments that can be passed in at the client and per-operation level.\n\n### Retry Policy configuration\n\nUse the following keyword arguments when instantiating a client to configure the retry policy:\n\n* __retry_total__ (int): Total number of retries to allow. Takes precedence over other counts.\nPass in `retry_total=0` if you do not want to retry on requests. Defaults to 10.\n* __retry_connect__ (int): How many connection-related errors to retry on. Defaults to 3.\n* __retry_read__ (int): How many times to retry on read errors. Defaults to 3.\n* __retry_status__ (int): How many times to retry on bad status codes. Defaults to 3.\n* __retry_to_secondary__ (bool): Whether the request should be retried to secondary, if able.\nThis should only be enabled of RA-GRS accounts are used and potentially stale data can be handled.\nDefaults to `False`.\n\n### Encryption configuration\n\nUse the following keyword arguments when instantiating a client to configure encryption:\n\n* __require_encryption__ (bool): If set to True, will enforce that objects are encrypted and decrypt them.\n* __encryption_version__ (str): Specifies the version of encryption to use. Current options are `'2.0'` or `'1.0'` and\nthe default value is `'1.0'`. Version 1.0 is deprecated, and it is **highly recommended** to use version 2.0.\n* __key_encryption_key__ (object): The user-provided key-encryption-key. The instance must implement the following methods:\n - `wrap_key(key)`--wraps the specified key using an algorithm of the user's choice.\n - `get_key_wrap_algorithm()`--returns the algorithm used to wrap the specified symmetric key.\n - `get_kid()`--returns a string key id for this key-encryption-key.\n* __key_resolver_function__ (callable): The user-provided key resolver. Uses the kid string to return a key-encryption-key\nimplementing the interface defined above.\n\n### Other client / per-operation configuration\n\nOther optional configuration keyword arguments that can be specified on the client or per-operation.\n\n**Client keyword arguments:**\n\n* __connection_timeout__ (int): The number of seconds the client will wait to establish a connection to the server.\nDefaults to 20 seconds.\n* __read_timeout__ (int): The number of seconds the client will wait, between consecutive read operations, for a\nresponse from the server. This is a socket level timeout and is not affected by overall data size. Client-side read \ntimeouts will be automatically retried. 
Defaults to 60 seconds.\n* __transport__ (Any): User-provided transport to send the HTTP request.\n\n**Per-operation keyword arguments:**\n\n* __raw_response_hook__ (callable): The given callback uses the response returned from the service.\n* __raw_request_hook__ (callable): The given callback uses the request before being sent to service.\n* __client_request_id__ (str): Optional user specified identification of the request.\n* __user_agent__ (str): Appends the custom value to the user-agent header to be sent with the request.\n* __logging_enable__ (bool): Enables logging at the DEBUG level. Defaults to False. Can also be passed in at\nthe client level to enable it for all requests.\n* __logging_body__ (bool): Enables logging the request and response body. Defaults to False. Can also be passed in at\nthe client level to enable it for all requests.\n* __headers__ (dict): Pass in custom headers as key, value pairs. E.g. `headers={'CustomValue': value}`\n\n## Troubleshooting\n### General\nStorage Blob clients raise exceptions defined in [Azure Core](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/README.md).\n\nThis list can be used for reference to catch thrown exceptions. To get the specific error code of the exception, use the `error_code` attribute, i.e, `exception.error_code`.\n\n### Logging\nThis library uses the standard\n[logging](https://docs.python.org/3/library/logging.html) library for logging.\nBasic information about HTTP sessions (URLs, headers, etc.) is logged at INFO\nlevel.\n\nDetailed DEBUG level logging, including request/response bodies and unredacted\nheaders, can be enabled on a client with the `logging_enable` argument:\n```python\nimport sys\nimport logging\nfrom azure.storage.blob import BlobServiceClient\n\n# Create a logger for the 'azure.storage.blob' SDK\nlogger = logging.getLogger('azure.storage.blob')\nlogger.setLevel(logging.DEBUG)\n\n# Configure a console output\nhandler = logging.StreamHandler(stream=sys.stdout)\nlogger.addHandler(handler)\n\n# This client will log detailed information about its HTTP sessions, at DEBUG level\nservice_client = BlobServiceClient.from_connection_string(\"your_connection_string\", logging_enable=True)\n```\n\nSimilarly, `logging_enable` can enable detailed logging for a single operation,\neven when it isn't enabled for the client:\n```python\nservice_client.get_service_stats(logging_enable=True)\n```\n\n## Next steps\n\n### More sample code\n\nGet started with our [Blob samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples).\n\nSeveral Storage Blobs Python SDK samples are available to you in the SDK's GitHub repository. 
These samples provide example code for additional scenarios commonly encountered while working with Storage Blobs:\n\n* [blob_samples_container_access_policy.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy_async.py)) - Examples to set Access policies:\n * Set up Access Policy for container\n\n* [blob_samples_hello_world.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world_async.py)) - Examples for common Storage Blob tasks:\n * Set up a container\n * Create a block, page, or append blob\n * Upload blobs\n * Download blobs\n * Delete blobs\n\n* [blob_samples_authentication.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_authentication.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_authentication_async.py)) - Examples for authenticating and creating the client:\n * From a connection string\n * From a shared access key\n * From a shared access signature token\n * From active directory\n\n* [blob_samples_service.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_service.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_service_async.py)) - Examples for interacting with the blob service:\n * Get account information\n * Get and set service properties\n * Get service statistics\n * Create, list, and delete containers\n * Get the Blob or Container client\n\n* [blob_samples_containers.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_containers_async.py)) - Examples for interacting with containers:\n * Create a container and delete containers\n * Set metadata on containers\n * Get container properties\n * Acquire a lease on container\n * Set an access policy on a container\n * Upload, list, delete blobs in container\n * Get the blob client to interact with a specific blob\n\n* [blob_samples_common.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_common.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_common_async.py)) - Examples common to all types of blobs:\n * Create a snapshot\n * Delete a blob snapshot\n * Soft delete a blob\n * Undelete a blob\n * Acquire a lease on a blob\n * Copy a blob from a URL\n\n* [blob_samples_directory_interface.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface.py) - Examples for interfacing with Blob storage as if it were a directory on a filesystem:\n * Copy (upload or download) a single file or directory\n 
* List files or directories at a single level or recursively\n * Delete a single file or recursively delete a directory\n\n### Additional documentation\nFor more extensive documentation on Azure Blob storage, see the [Azure Blob storage documentation](https://learn.microsoft.com/azure/storage/blobs/) on learn.microsoft.com.\n\n## Contributing\nThis project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.microsoft.com.\n\nWhen you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA.\n\nThis project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.", - "release_date": "2025-03-27T17:13:06", + "release_date": "2025-07-16T21:34:09", "parties": [ { "type": "person", @@ -284,11 +284,11 @@ "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob", - "download_url": "https://files.pythonhosted.org/packages/57/33/085d9352d416e617993821b9d9488222fbb559bc15c3641d6cbd6d16d236/azure_storage_blob-12.25.1-py3-none-any.whl", - "size": 406990, + "download_url": "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", + "size": 412907, "sha1": null, - "md5": "20b5072c0d73c87cc0bd020da5c5f2f4", - "sha256": "1f337aab12e918ec3f1b638baada97550673911c4ceed892acc8e4e891b74167", + "md5": "b7ee3d0eec2bce8bbf60fc238d4349b7", + "sha256": "8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", "sha512": null, "bug_tracking_url": null, "code_view_url": null, @@ -308,20 +308,20 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/azure-storage-blob/12.25.1/json", + "api_data_url": "https://pypi.org/pypi/azure-storage-blob/12.26.0/json", "datasource_id": null, - "purl": "pkg:pypi/azure-storage-blob@12.25.1" + "purl": "pkg:pypi/azure-storage-blob@12.26.0" }, { "type": "pypi", "namespace": null, "name": "certifi", - "version": "2025.7.14", + "version": "2025.8.3", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "Python package for providing Mozilla's CA Bundle.\nCertifi: Python SSL Certificates\n================================\n\nCertifi provides Mozilla's carefully curated collection of Root Certificates for\nvalidating the trustworthiness of SSL certificates while verifying the identity\nof TLS hosts. It has been extracted from the `Requests`_ project.\n\nInstallation\n------------\n\n``certifi`` is available on PyPI. 
Simply install it with ``pip``::\n\n $ pip install certifi\n\nUsage\n-----\n\nTo reference the installed certificate authority (CA) bundle, you can use the\nbuilt-in function::\n\n >>> import certifi\n\n >>> certifi.where()\n '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'\n\nOr from the command line::\n\n $ python -m certifi\n /usr/local/lib/python3.7/site-packages/certifi/cacert.pem\n\nEnjoy!\n\n.. _`Requests`: https://requests.readthedocs.io/en/master/\n\nAddition/Removal of Certificates\n--------------------------------\n\nCertifi does not support any addition/removal or other modification of the\nCA trust store content. This project is intended to provide a reliable and\nhighly portable root of trust to python deployments. Look to upstream projects\nfor methods to use alternate trust.", - "release_date": "2025-07-14T03:29:26", + "release_date": "2025-08-03T03:07:45", "parties": [ { "type": "person", @@ -347,11 +347,11 @@ "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/certifi/python-certifi", - "download_url": "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", - "size": 162722, + "download_url": "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", + "size": 161216, "sha1": null, - "md5": "8561c6b29236cd268f57ddb4f22281d3", - "sha256": "6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", + "md5": "f9b6740cffcf397b47bc7fb7782b1354", + "sha256": "f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/certifi/python-certifi", @@ -371,26 +371,33 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/certifi/2025.7.14/json", + "api_data_url": "https://pypi.org/pypi/certifi/2025.8.3/json", "datasource_id": null, - "purl": "pkg:pypi/certifi@2025.7.14" + "purl": "pkg:pypi/certifi@2025.8.3" }, { "type": "pypi", "namespace": null, "name": "cffi", - "version": "1.17.1", + "version": "2.0.0", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "CFFI\n====\n\nForeign Function Interface for Python calling C code.\nPlease see the `Documentation `_.\n\nContact\n-------\n\n`Mailing list `_", - "release_date": "2024-09-04T20:43:41", + "description": "[![GitHub Actions Status](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml/badge.svg?branch=main)](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml?query=branch%3Amain++)\n[![PyPI version](https://img.shields.io/pypi/v/cffi.svg)](https://pypi.org/project/cffi)\n[![Read the Docs](https://img.shields.io/badge/docs-latest-blue.svg)][Documentation]\n\n\nCFFI\n====\n\nForeign Function Interface for Python calling C code.\n\nPlease see the [Documentation] or uncompiled in the `doc/` subdirectory.\n\nDownload\n--------\n\n[Download page](https://github.com/python-cffi/cffi/releases)\n\nSource Code\n-----------\n\nSource code is publicly available on\n[GitHub](https://github.com/python-cffi/cffi).\n\nContact\n-------\n\n[Mailing list](https://groups.google.com/forum/#!forum/python-cffi)\n\nTesting/development tips\n------------------------\n\nAfter `git clone` or `wget && tar`, we will get a directory called `cffi` or `cffi-x.x.x`. 
we call it `repo-directory`. To run tests under CPython, run the following in the `repo-directory`:\n\n pip install pytest\n pip install -e . # editable install of CFFI for local development\n pytest src/c/ testing/\n\n[Documentation]: http://cffi.readthedocs.org/", + "release_date": "2025-09-08T23:22:17", "parties": [ { "type": "person", "role": "author", "name": "Armin Rigo, Maciej Fijalkowski", - "email": "python-cffi@googlegroups.com", + "email": null, + "url": null + }, + { + "type": "person", + "role": "maintainer", + "name": "Matt Davis, Matt Clay, Matti Picus", + "email": null, "url": null } ], @@ -401,29 +408,24 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy" + "Programming Language :: Python :: Free Threading :: 2 - Beta", + "Programming Language :: Python :: Implementation :: CPython" ], - "homepage_url": "http://cffi.readthedocs.org", - "download_url": "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "size": 446211, + "homepage_url": null, + "download_url": "https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", + "size": 216475, "sha1": null, - "md5": "11351a1a87b2f5a8153c94533ae161ed", - "sha256": "2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", + "md5": "47a32c27b12c0d41dcc5e9067bc55dd0", + "sha256": "fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453", "sha512": null, "bug_tracking_url": "https://github.com/python-cffi/cffi/issues", "code_view_url": "https://github.com/python-cffi/cffi", "vcs_url": null, "copyright": null, - "license_expression": null, - "declared_license": { - "license": "MIT", - "classifiers": [ - "License :: OSI Approved :: MIT License" - ] - }, + "license_expression": "MIT", + "declared_license": {}, "notice_text": null, "source_packages": [], "file_references": [], @@ -431,20 +433,20 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/cffi/1.17.1/json", + "api_data_url": "https://pypi.org/pypi/cffi/2.0.0/json", "datasource_id": null, - "purl": "pkg:pypi/cffi@1.17.1" + "purl": "pkg:pypi/cffi@2.0.0" }, { "type": "pypi", "namespace": null, "name": "charset-normalizer", - "version": "3.4.2", + "version": "3.4.3", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.\n

Charset Detection, for Everyone \ud83d\udc4b

\n\n

\n The Real First Universal Charset Detector
\n \n \n \n \n \"Download\n \n \n \n \n

\n

\n Featured Packages
\n \n \"Static\n \n \n \"Static\n \n

\n

\n In other language (unofficial port - by the community)
\n \n \"Static\n \n

\n\n> A library that helps you read text from an unknown charset encoding.
Motivated by `chardet`,\n> I'm trying to resolve the issue by taking a new approach.\n> All IANA character set names for which the Python core library provides codecs are supported.\n\n

\n >>>>> \ud83d\udc49 Try Me Online Now, Then Adopt Me \ud83d\udc48 <<<<<\n

\n\nThis project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.\n\n| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |\n|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|\n| `Fast` | \u274c | \u2705 | \u2705 |\n| `Universal**` | \u274c | \u2705 | \u274c |\n| `Reliable` **without** distinguishable standards | \u274c | \u2705 | \u2705 |\n| `Reliable` **with** distinguishable standards | \u2705 | \u2705 | \u2705 |\n| `License` | LGPL-2.1
_restrictive_ | MIT | MPL-1.1
_restrictive_ |\n| `Native Python` | \u2705 | \u2705 | \u274c |\n| `Detect spoken language` | \u274c | \u2705 | N/A |\n| `UnicodeDecodeError Safety` | \u274c | \u2705 | \u274c |\n| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |\n| `Supported Encoding` | 33 | \ud83c\udf89 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |\n\n

\n\"Reading\"Cat\n

\n\n*\\*\\* : They are clearly using specific code for a specific encoding even if covering most of used one*
\n\n## \u26a1 Performance\n\nThis package offer better performance than its counterpart Chardet. Here are some numbers.\n\n| Package | Accuracy | Mean per file (ms) | File per sec (est) |\n|-----------------------------------------------|:--------:|:------------------:|:------------------:|\n| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |\n| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |\n\n| Package | 99th percentile | 95th percentile | 50th percentile |\n|-----------------------------------------------|:---------------:|:---------------:|:---------------:|\n| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |\n| charset-normalizer | 100 ms | 50 ms | 5 ms |\n\n_updated as of december 2024 using CPython 3.12_\n\nChardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload.\n\n> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.\n> And yes, these results might change at any time. The dataset can be updated to include more files.\n> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.\n> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability\n> (e.g. Supported Encoding) Challenge-them if you want.\n\n## \u2728 Installation\n\nUsing pip:\n\n```sh\npip install charset-normalizer -U\n```\n\n## \ud83d\ude80 Basic Usage\n\n### CLI\nThis package comes with a CLI.\n\n```\nusage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]\n file [file ...]\n\nThe Real First Universal Charset Detector. Discover originating encoding used\non text file. Normalize text to unicode.\n\npositional arguments:\n files File(s) to be analysed\n\noptional arguments:\n -h, --help show this help message and exit\n -v, --verbose Display complementary information about file if any.\n Stdout will contain logs about the detection process.\n -a, --with-alternative\n Output complementary possibilities if any. Top-level\n JSON WILL be a list.\n -n, --normalize Permit to normalize input file. If not set, program\n does not write anything.\n -m, --minimal Only output the charset detected to STDOUT. Disabling\n JSON output.\n -r, --replace Replace file when trying to normalize it instead of\n creating a new one.\n -f, --force Replace file without asking if you are sure, use this\n flag with caution.\n -t THRESHOLD, --threshold THRESHOLD\n Define a custom maximum amount of chaos allowed in\n decoded content. 0. 
<= chaos <= 1.\n --version Show version information and exit.\n```\n\n```bash\nnormalizer ./data/sample.1.fr.srt\n```\n\nor\n\n```bash\npython -m charset_normalizer ./data/sample.1.fr.srt\n```\n\n\ud83c\udf89 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format.\n\n```json\n{\n \"path\": \"/home/default/projects/charset_normalizer/data/sample.1.fr.srt\",\n \"encoding\": \"cp1252\",\n \"encoding_aliases\": [\n \"1252\",\n \"windows_1252\"\n ],\n \"alternative_encodings\": [\n \"cp1254\",\n \"cp1256\",\n \"cp1258\",\n \"iso8859_14\",\n \"iso8859_15\",\n \"iso8859_16\",\n \"iso8859_3\",\n \"iso8859_9\",\n \"latin_1\",\n \"mbcs\"\n ],\n \"language\": \"French\",\n \"alphabets\": [\n \"Basic Latin\",\n \"Latin-1 Supplement\"\n ],\n \"has_sig_or_bom\": false,\n \"chaos\": 0.149,\n \"coherence\": 97.152,\n \"unicode_path\": null,\n \"is_preferred\": true\n}\n```\n\n### Python\n*Just print out normalized text*\n```python\nfrom charset_normalizer import from_path\n\nresults = from_path('./my_subtitle.srt')\n\nprint(str(results.best()))\n```\n\n*Upgrade your code without effort*\n```python\nfrom charset_normalizer import detect\n```\n\nThe above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.\n\nSee the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)\n\n## \ud83d\ude07 Why\n\nWhen I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a\nreliable alternative using a completely different method. Also! I never back down on a good challenge!\n\nI **don't care** about the **originating charset** encoding, because **two different tables** can\nproduce **two identical rendered string.**\nWhat I want is to get readable text, the best I can.\n\nIn a way, **I'm brute forcing text decoding.** How cool is that ? \ud83d\ude0e\n\nDon't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.\n\n## \ud83c\udf70 How\n\n - Discard all charset encoding table that could not fit the binary content.\n - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.\n - Extract matches with the lowest mess detected.\n - Additionally, we measure coherence / probe for a language.\n\n**Wait a minute**, what is noise/mess and coherence according to **YOU ?**\n\n*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then\n**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).\n I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to\n improve or rewrite it.\n\n*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought\nthat intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.\n\n## \u26a1 Known limitations\n\n - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))\n - Every charset detector heavily depends on sufficient content. 
In common cases, do not bother run detection on very tiny content.\n\n## \u26a0\ufe0f About Python EOLs\n\n**If you are running:**\n\n- Python >=2.7,<3.5: Unsupported\n- Python 3.5: charset-normalizer < 2.1\n- Python 3.6: charset-normalizer < 3.1\n- Python 3.7: charset-normalizer < 4.0\n\nUpgrade your Python interpreter as soon as possible.\n\n## \ud83d\udc64 Contributing\n\nContributions, issues and feature requests are very much welcome.
\nFeel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.\n\n## \ud83d\udcdd License\n\nCopyright \u00a9 [Ahmed TAHRI @Ousret](https://github.com/Ousret).
\nThis project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.\n\nCharacters frequencies used in this project \u00a9 2012 [Denny Vrande\u010di\u0107](http://simia.net/letters/)\n\n## \ud83d\udcbc For Enterprise\n\nProfessional support for charset-normalizer is available as part of the [Tidelift\nSubscription][1]. Tidelift gives software development teams a single source for\npurchasing and maintaining their software, with professional grade assurances\nfrom the experts who know it best, while seamlessly integrating with existing\ntools.\n\n[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme\n\n[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297)\n\n# Changelog\nAll notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\n\n## [3.4.2](https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2) (2025-05-02)\n\n### Fixed\n- Addressed the DeprecationWarning in our CLI regarding `argparse.FileType` by backporting the target class into the package. (#591)\n- Improved the overall reliability of the detector with CJK Ideographs. (#605) (#587)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8\n\n## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24)\n\n### Changed\n- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend.\n- Enforce annotation delayed loading for a simpler and consistent types in the project.\n- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8\n\n### Added\n- pre-commit configuration.\n- noxfile.\n\n### Removed\n- `build-requirements.txt` as per using `pyproject.toml` native build configuration.\n- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile).\n- `setup.cfg` in favor of `pyproject.toml` metadata configuration.\n- Unused `utils.range_scan` function.\n\n### Fixed\n- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. (#572)\n- Deprecation warning \"'count' is passed as positional argument\" when converting to Unicode bytes on Python 3.13+\n\n## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)\n\n### Added\n- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints.\n- Support for Python 3.13 (#512)\n\n### Fixed\n- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.\n- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537)\n- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. 
(#381)\n\n## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)\n\n### Fixed\n- Unintentional memory usage regression when using large payload that match several encoding (#376)\n- Regression on some detection case showcased in the documentation (#371)\n\n### Added\n- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)\n\n## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8\n- Improved the general detection reliability based on reports from the community\n\n## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)\n\n### Added\n- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`\n- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)\n\n### Removed\n- (internal) Redundant utils.is_ascii function and unused function is_private_use_only\n- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant\n\n### Changed\n- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection\n- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8\n\n### Fixed\n- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \\_\\_lt\\_\\_ (#350)\n\n## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)\n\n### Changed\n- Typehint for function `from_path` no longer enforce `PathLike` as its first argument\n- Minor improvement over the global detection reliability\n\n### Added\n- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries\n- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)\n- Explicit support for Python 3.12\n\n### Fixed\n- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)\n\n## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)\n\n### Added\n- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)\n\n### Removed\n- Support for Python 3.6 (PR #260)\n\n### Changed\n- Optional speedup provided by mypy/c 1.0.1\n\n## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)\n\n### Fixed\n- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233)\n\n### Changed\n- Speedup provided by mypy/c 0.990 on Python >= 3.7\n\n## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Changed\n- Build 
with static metadata using 'build' frontend\n- Make the language detection stricter\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n- Sphinx warnings when generating the documentation\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n\n### Changed\n- Build with static metadata using 'build' frontend\n- Make the language detection stricter\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n\n## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)\n\n### Added\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Removed\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n\n### Fixed\n- Sphinx warnings when generating the documentation\n\n## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)\n\n### Changed\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Removed\n- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)\n\n### Deprecated\n- Function `normalize` scheduled for removal in 3.0\n\n### Changed\n- Removed useless call to decode in fn is_unprintable (#206)\n\n### Fixed\n- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) 
(#204)\n\n## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)\n\n### Added\n- Output the Unicode table version when running the CLI with `--version` (PR #194)\n\n### Changed\n- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)\n- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)\n\n### Fixed\n- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)\n- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)\n\n### Removed\n- Support for Python 3.5 (PR #192)\n\n### Deprecated\n- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)\n\n## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)\n\n### Fixed\n- ASCII miss-detection on rare cases (PR #170)\n\n## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)\n\n### Added\n- Explicit support for Python 3.11 (PR #164)\n\n### Changed\n- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)\n\n## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)\n\n### Fixed\n- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)\n\n### Changed\n- Skipping the language-detection (CD) on ASCII (PR #155)\n\n## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)\n\n### Changed\n- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)\n\n### Fixed\n- Wrong logging level applied when setting kwarg `explain` to True (PR #146)\n\n## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)\n### Changed\n- Improvement over Vietnamese detection (PR #126)\n- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)\n- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)\n- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)\n- Code style as refactored by Sourcery-AI (PR #131)\n- Minor adjustment on the MD around european words (PR #133)\n- Remove and replace SRTs from assets / tests (PR #139)\n- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135)\n\n### Fixed\n- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)\n- Avoid using too insignificant chunk (PR #137)\n\n### Added\n- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)\n\n## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)\n### Added\n- Add support for Kazakh (Cyrillic) language detection (PR #109)\n\n### Changed\n- Further, improve inferring the 
language from a given single-byte code page (PR #112)\n- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)\n- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)\n- Various detection improvement (MD+CD) (PR #117)\n\n### Removed\n- Remove redundant logging entry about detected language(s) (PR #115)\n\n### Fixed\n- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)\n\n## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)\n### Fixed\n- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)\n- Fix CLI crash when using --minimal output in certain cases (PR #103)\n\n### Changed\n- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)\n\n## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)\n### Changed\n- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)\n- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)\n- The Unicode detection is slightly improved (PR #93)\n- Add syntax sugar \\_\\_bool\\_\\_ for results CharsetMatches list-container (PR #91)\n\n### Removed\n- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)\n\n### Fixed\n- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)\n- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)\n- The MANIFEST.in was not exhaustive (PR #78)\n\n## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)\n### Fixed\n- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)\n- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)\n- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)\n- Submatch factoring could be wrong in rare edge cases (PR #72)\n- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72)\n- Fix line endings from CRLF to LF for certain project files (PR #67)\n\n### Changed\n- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)\n- Allow fallback on specified encoding if any (PR #71)\n\n## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)\n### Changed\n- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)\n- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)\n\n## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)\n### Fixed\n- Empty/Too small JSON payload miss-detection fixed. 
Report from [@tseaver](https://github.com/tseaver) (PR #59)\n\n### Changed\n- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)\n\n## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)\n### Fixed\n- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)\n- Using explain=False permanently disable the verbose output in the current runtime (PR #47)\n- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47)\n- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)\n\n### Changed\n- Public function normalize default args values were not aligned with from_bytes (PR #53)\n\n### Added\n- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)\n\n## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)\n### Changed\n- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.\n- Accent has been made on UTF-8 detection, should perform rather instantaneous.\n- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.\n- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)\n- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+\n- utf_7 detection has been reinstated.\n\n### Removed\n- This package no longer require anything when used with Python 3.5 (Dropped cached_property)\n- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volap\u00fck, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.\n- The exception hook on UnicodeDecodeError has been removed.\n\n### Deprecated\n- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0\n\n### Fixed\n- The CLI output used the relative path of the file(s). Should be absolute.\n\n## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)\n### Fixed\n- Logger configuration/usage no longer conflict with others (PR #44)\n\n## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)\n### Removed\n- Using standard logging instead of using the package loguru.\n- Dropping nose test framework in favor of the maintained pytest.\n- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.\n- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.\n- Stop support for UTF-7 that does not contain a SIG.\n- Dropping PrettyTable, replaced with pure JSON output in CLI.\n\n### Fixed\n- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process.\n- Not searching properly for the BOM when trying utf32/16 parent codec.\n\n### Changed\n- Improving the package final size by compressing frequencies.json.\n- Huge improvement over the larges payload.\n\n### Added\n- CLI now produces JSON consumable output.\n- Return ASCII if given sequences fit. 
Given reasonable confidence.\n\n## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)\n\n### Fixed\n- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)\n\n## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)\n\n### Fixed\n- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)\n\n## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)\n\n### Fixed\n- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)\n\n## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)\n\n### Changed\n- Amend the previous release to allow prettytable 2.0 (PR #35)\n\n## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)\n\n### Fixed\n- Fix error while using the package with a python pre-release interpreter (PR #33)\n\n### Changed\n- Dependencies refactoring, constraints revised.\n\n### Added\n- Add python 3.9 and 3.10 to the supported interpreters\n\nMIT License\n\nCopyright (c) 2025 TAHRI Ahmed R.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.", - "release_date": "2025-05-02T08:31:56", + "description": "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.\n

Charset Detection, for Everyone \ud83d\udc4b

\n\n

\n The Real First Universal Charset Detector
\n \n \n \n \n \"Download\n \n \n \n \n

\n

\n Featured Packages
\n \n \"Static\n \n \n \"Static\n \n

\n

\n In other language (unofficial port - by the community)
\n \n \"Static\n \n

\n\n> A library that helps you read text from an unknown charset encoding.
Motivated by `chardet`,\n> I'm trying to resolve the issue by taking a new approach.\n> All IANA character set names for which the Python core library provides codecs are supported.\n\n

\n >>>>> \ud83d\udc49 Try Me Online Now, Then Adopt Me \ud83d\udc48 <<<<<\n

\n\nThis project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.\n\n| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |\n|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|\n| `Fast` | \u274c | \u2705 | \u2705 |\n| `Universal**` | \u274c | \u2705 | \u274c |\n| `Reliable` **without** distinguishable standards | \u274c | \u2705 | \u2705 |\n| `Reliable` **with** distinguishable standards | \u2705 | \u2705 | \u2705 |\n| `License` | LGPL-2.1
_restrictive_ | MIT | MPL-1.1
_restrictive_ |\n| `Native Python` | \u2705 | \u2705 | \u274c |\n| `Detect spoken language` | \u274c | \u2705 | N/A |\n| `UnicodeDecodeError Safety` | \u274c | \u2705 | \u274c |\n| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |\n| `Supported Encoding` | 33 | \ud83c\udf89 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |\n\n

\n\"Reading\"Cat\n

\n\n*\\*\\* : They are clearly using specific code for a specific encoding even if covering most of used one*
\n\n## \u26a1 Performance\n\nThis package offer better performance than its counterpart Chardet. Here are some numbers.\n\n| Package | Accuracy | Mean per file (ms) | File per sec (est) |\n|-----------------------------------------------|:--------:|:------------------:|:------------------:|\n| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |\n| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |\n\n| Package | 99th percentile | 95th percentile | 50th percentile |\n|-----------------------------------------------|:---------------:|:---------------:|:---------------:|\n| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |\n| charset-normalizer | 100 ms | 50 ms | 5 ms |\n\n_updated as of december 2024 using CPython 3.12_\n\nChardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload.\n\n> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.\n> And yes, these results might change at any time. The dataset can be updated to include more files.\n> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.\n> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability\n> (e.g. Supported Encoding) Challenge-them if you want.\n\n## \u2728 Installation\n\nUsing pip:\n\n```sh\npip install charset-normalizer -U\n```\n\n## \ud83d\ude80 Basic Usage\n\n### CLI\nThis package comes with a CLI.\n\n```\nusage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]\n file [file ...]\n\nThe Real First Universal Charset Detector. Discover originating encoding used\non text file. Normalize text to unicode.\n\npositional arguments:\n files File(s) to be analysed\n\noptional arguments:\n -h, --help show this help message and exit\n -v, --verbose Display complementary information about file if any.\n Stdout will contain logs about the detection process.\n -a, --with-alternative\n Output complementary possibilities if any. Top-level\n JSON WILL be a list.\n -n, --normalize Permit to normalize input file. If not set, program\n does not write anything.\n -m, --minimal Only output the charset detected to STDOUT. Disabling\n JSON output.\n -r, --replace Replace file when trying to normalize it instead of\n creating a new one.\n -f, --force Replace file without asking if you are sure, use this\n flag with caution.\n -t THRESHOLD, --threshold THRESHOLD\n Define a custom maximum amount of chaos allowed in\n decoded content. 0. 
<= chaos <= 1.\n --version Show version information and exit.\n```\n\n```bash\nnormalizer ./data/sample.1.fr.srt\n```\n\nor\n\n```bash\npython -m charset_normalizer ./data/sample.1.fr.srt\n```\n\n\ud83c\udf89 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format.\n\n```json\n{\n \"path\": \"/home/default/projects/charset_normalizer/data/sample.1.fr.srt\",\n \"encoding\": \"cp1252\",\n \"encoding_aliases\": [\n \"1252\",\n \"windows_1252\"\n ],\n \"alternative_encodings\": [\n \"cp1254\",\n \"cp1256\",\n \"cp1258\",\n \"iso8859_14\",\n \"iso8859_15\",\n \"iso8859_16\",\n \"iso8859_3\",\n \"iso8859_9\",\n \"latin_1\",\n \"mbcs\"\n ],\n \"language\": \"French\",\n \"alphabets\": [\n \"Basic Latin\",\n \"Latin-1 Supplement\"\n ],\n \"has_sig_or_bom\": false,\n \"chaos\": 0.149,\n \"coherence\": 97.152,\n \"unicode_path\": null,\n \"is_preferred\": true\n}\n```\n\n### Python\n*Just print out normalized text*\n```python\nfrom charset_normalizer import from_path\n\nresults = from_path('./my_subtitle.srt')\n\nprint(str(results.best()))\n```\n\n*Upgrade your code without effort*\n```python\nfrom charset_normalizer import detect\n```\n\nThe above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.\n\nSee the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)\n\n## \ud83d\ude07 Why\n\nWhen I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a\nreliable alternative using a completely different method. Also! I never back down on a good challenge!\n\nI **don't care** about the **originating charset** encoding, because **two different tables** can\nproduce **two identical rendered string.**\nWhat I want is to get readable text, the best I can.\n\nIn a way, **I'm brute forcing text decoding.** How cool is that ? \ud83d\ude0e\n\nDon't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.\n\n## \ud83c\udf70 How\n\n - Discard all charset encoding table that could not fit the binary content.\n - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.\n - Extract matches with the lowest mess detected.\n - Additionally, we measure coherence / probe for a language.\n\n**Wait a minute**, what is noise/mess and coherence according to **YOU ?**\n\n*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then\n**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).\n I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to\n improve or rewrite it.\n\n*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought\nthat intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.\n\n## \u26a1 Known limitations\n\n - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))\n - Every charset detector heavily depends on sufficient content. 
In common cases, do not bother run detection on very tiny content.\n\n## \u26a0\ufe0f About Python EOLs\n\n**If you are running:**\n\n- Python >=2.7,<3.5: Unsupported\n- Python 3.5: charset-normalizer < 2.1\n- Python 3.6: charset-normalizer < 3.1\n- Python 3.7: charset-normalizer < 4.0\n\nUpgrade your Python interpreter as soon as possible.\n\n## \ud83d\udc64 Contributing\n\nContributions, issues and feature requests are very much welcome.
\nFeel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.\n\n## \ud83d\udcdd License\n\nCopyright \u00a9 [Ahmed TAHRI @Ousret](https://github.com/Ousret).
\nThis project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.\n\nCharacters frequencies used in this project \u00a9 2012 [Denny Vrande\u010di\u0107](http://simia.net/letters/)\n\n## \ud83d\udcbc For Enterprise\n\nProfessional support for charset-normalizer is available as part of the [Tidelift\nSubscription][1]. Tidelift gives software development teams a single source for\npurchasing and maintaining their software, with professional grade assurances\nfrom the experts who know it best, while seamlessly integrating with existing\ntools.\n\n[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme\n\n[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297)\n\n# Changelog\nAll notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\n\n## [3.4.3](https://github.com/Ousret/charset_normalizer/compare/3.4.2...3.4.3) (2025-08-09)\n\n### Changed\n- mypy(c) is no longer a required dependency at build time if `CHARSET_NORMALIZER_USE_MYPYC` isn't set to `1`. (#595) (#583)\n- automatically lower confidence on small bytes samples that are not Unicode in `detect` output legacy function. (#391)\n\n### Added\n- Custom build backend to overcome inability to mark mypy as an optional dependency in the build phase.\n- Support for Python 3.14\n\n### Fixed\n- sdist archive contained useless directories.\n- automatically fallback on valid UTF-16 or UTF-32 even if the md says it's noisy. (#633)\n\n### Misc\n- SBOM are automatically published to the relevant GitHub release to comply with regulatory changes.\n Each published wheel comes with its SBOM. We choose CycloneDX as the format.\n- Prebuilt optimized wheel are no longer distributed by default for CPython 3.7 due to a change in cibuildwheel.\n\n## [3.4.2](https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2) (2025-05-02)\n\n### Fixed\n- Addressed the DeprecationWarning in our CLI regarding `argparse.FileType` by backporting the target class into the package. (#591)\n- Improved the overall reliability of the detector with CJK Ideographs. (#605) (#587)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8\n\n## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24)\n\n### Changed\n- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend.\n- Enforce annotation delayed loading for a simpler and consistent types in the project.\n- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8\n\n### Added\n- pre-commit configuration.\n- noxfile.\n\n### Removed\n- `build-requirements.txt` as per using `pyproject.toml` native build configuration.\n- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile).\n- `setup.cfg` in favor of `pyproject.toml` metadata configuration.\n- Unused `utils.range_scan` function.\n\n### Fixed\n- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. 
(#572)\n- Deprecation warning \"'count' is passed as positional argument\" when converting to Unicode bytes on Python 3.13+\n\n## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)\n\n### Added\n- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints.\n- Support for Python 3.13 (#512)\n\n### Fixed\n- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.\n- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537)\n- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. (#381)\n\n## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)\n\n### Fixed\n- Unintentional memory usage regression when using large payload that match several encoding (#376)\n- Regression on some detection case showcased in the documentation (#371)\n\n### Added\n- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)\n\n## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8\n- Improved the general detection reliability based on reports from the community\n\n## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)\n\n### Added\n- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`\n- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)\n\n### Removed\n- (internal) Redundant utils.is_ascii function and unused function is_private_use_only\n- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant\n\n### Changed\n- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection\n- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8\n\n### Fixed\n- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \\_\\_lt\\_\\_ (#350)\n\n## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)\n\n### Changed\n- Typehint for function `from_path` no longer enforce `PathLike` as its first argument\n- Minor improvement over the global detection reliability\n\n### Added\n- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries\n- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)\n- Explicit support for Python 3.12\n\n### Fixed\n- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)\n\n## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)\n\n### Added\n- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)\n\n### Removed\n- Support for Python 3.6 (PR #260)\n\n### Changed\n- Optional speedup provided by mypy/c 1.0.1\n\n## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)\n\n### Fixed\n- Multi-bytes cutter/chunk generator did not always cut 
correctly (PR #233)\n\n### Changed\n- Speedup provided by mypy/c 0.990 on Python >= 3.7\n\n## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Changed\n- Build with static metadata using 'build' frontend\n- Make the language detection stricter\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n- Sphinx warnings when generating the documentation\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n\n### Changed\n- Build with static metadata using 'build' frontend\n- Make the language detection stricter\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n\n## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)\n\n### Added\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Removed\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n\n### Fixed\n- Sphinx warnings when generating the documentation\n\n## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)\n\n### Changed\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Removed\n- Breaking: Class aliases 
CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)\n\n### Deprecated\n- Function `normalize` scheduled for removal in 3.0\n\n### Changed\n- Removed useless call to decode in fn is_unprintable (#206)\n\n### Fixed\n- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204)\n\n## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)\n\n### Added\n- Output the Unicode table version when running the CLI with `--version` (PR #194)\n\n### Changed\n- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)\n- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)\n\n### Fixed\n- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)\n- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)\n\n### Removed\n- Support for Python 3.5 (PR #192)\n\n### Deprecated\n- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)\n\n## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)\n\n### Fixed\n- ASCII miss-detection on rare cases (PR #170)\n\n## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)\n\n### Added\n- Explicit support for Python 3.11 (PR #164)\n\n### Changed\n- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)\n\n## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)\n\n### Fixed\n- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)\n\n### Changed\n- Skipping the language-detection (CD) on ASCII (PR #155)\n\n## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)\n\n### Changed\n- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)\n\n### Fixed\n- Wrong logging level applied when setting kwarg `explain` to True (PR #146)\n\n## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)\n### Changed\n- Improvement over Vietnamese detection (PR #126)\n- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)\n- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)\n- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)\n- Code style as refactored by Sourcery-AI (PR #131)\n- Minor adjustment on the MD around european words (PR #133)\n- Remove and replace SRTs from assets / tests (PR #139)\n- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Setting kwarg `explain` to True will add provisionally 
(bounded to function lifespan) a specific stream handler (PR #135)\n\n### Fixed\n- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)\n- Avoid using too insignificant chunk (PR #137)\n\n### Added\n- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)\n\n## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)\n### Added\n- Add support for Kazakh (Cyrillic) language detection (PR #109)\n\n### Changed\n- Further, improve inferring the language from a given single-byte code page (PR #112)\n- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)\n- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)\n- Various detection improvement (MD+CD) (PR #117)\n\n### Removed\n- Remove redundant logging entry about detected language(s) (PR #115)\n\n### Fixed\n- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)\n\n## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)\n### Fixed\n- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)\n- Fix CLI crash when using --minimal output in certain cases (PR #103)\n\n### Changed\n- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)\n\n## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)\n### Changed\n- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)\n- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)\n- The Unicode detection is slightly improved (PR #93)\n- Add syntax sugar \\_\\_bool\\_\\_ for results CharsetMatches list-container (PR #91)\n\n### Removed\n- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)\n\n### Fixed\n- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)\n- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)\n- The MANIFEST.in was not exhaustive (PR #78)\n\n## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)\n### Fixed\n- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)\n- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)\n- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)\n- Submatch factoring could be wrong in rare edge cases (PR #72)\n- Multiple files given to the CLI were ignored when publishing results to STDOUT. 
(After the first path) (PR #72)\n- Fix line endings from CRLF to LF for certain project files (PR #67)\n\n### Changed\n- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)\n- Allow fallback on specified encoding if any (PR #71)\n\n## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)\n### Changed\n- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)\n- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)\n\n## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)\n### Fixed\n- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)\n\n### Changed\n- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)\n\n## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)\n### Fixed\n- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)\n- Using explain=False permanently disable the verbose output in the current runtime (PR #47)\n- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47)\n- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)\n\n### Changed\n- Public function normalize default args values were not aligned with from_bytes (PR #53)\n\n### Added\n- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)\n\n## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)\n### Changed\n- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.\n- Accent has been made on UTF-8 detection, should perform rather instantaneous.\n- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.\n- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)\n- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+\n- utf_7 detection has been reinstated.\n\n### Removed\n- This package no longer require anything when used with Python 3.5 (Dropped cached_property)\n- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volap\u00fck, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.\n- The exception hook on UnicodeDecodeError has been removed.\n\n### Deprecated\n- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0\n\n### Fixed\n- The CLI output used the relative path of the file(s). 
Should be absolute.\n\n## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)\n### Fixed\n- Logger configuration/usage no longer conflict with others (PR #44)\n\n## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)\n### Removed\n- Using standard logging instead of using the package loguru.\n- Dropping nose test framework in favor of the maintained pytest.\n- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.\n- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.\n- Stop support for UTF-7 that does not contain a SIG.\n- Dropping PrettyTable, replaced with pure JSON output in CLI.\n\n### Fixed\n- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process.\n- Not searching properly for the BOM when trying utf32/16 parent codec.\n\n### Changed\n- Improving the package final size by compressing frequencies.json.\n- Huge improvement over the larges payload.\n\n### Added\n- CLI now produces JSON consumable output.\n- Return ASCII if given sequences fit. Given reasonable confidence.\n\n## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)\n\n### Fixed\n- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)\n\n## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)\n\n### Fixed\n- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)\n\n## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)\n\n### Fixed\n- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)\n\n## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)\n\n### Changed\n- Amend the previous release to allow prettytable 2.0 (PR #35)\n\n## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)\n\n### Fixed\n- Fix error while using the package with a python pre-release interpreter (PR #33)\n\n### Changed\n- Dependencies refactoring, constraints revised.\n\n### Added\n- Add python 3.9 and 3.10 to the supported interpreters\n\nMIT License\n\nCopyright (c) 2025 TAHRI Ahmed R.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.", + "release_date": "2025-08-09T07:57:26", "parties": [ { "type": "person", @@ -480,6 +482,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -490,11 +493,11 @@ "Typing :: Typed" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "size": 149471, + "download_url": "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", + "size": 53175, "sha1": null, - "md5": "8e85a262cbd63cdd5f2a3d1304587a45", - "sha256": "8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", + "md5": "4a43811bb5747201dc3694e76763e446", + "sha256": "ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/jawah/charset_normalizer", @@ -502,10 +505,7 @@ "copyright": null, "license_expression": null, "declared_license": { - "license": "MIT", - "classifiers": [ - "License :: OSI Approved :: MIT License" - ] + "license": "MIT" }, "notice_text": null, "source_packages": [], @@ -514,20 +514,20 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/charset-normalizer/3.4.2/json", + "api_data_url": "https://pypi.org/pypi/charset-normalizer/3.4.3/json", "datasource_id": null, - "purl": "pkg:pypi/charset-normalizer@3.4.2" + "purl": "pkg:pypi/charset-normalizer@3.4.3" }, { "type": "pypi", "namespace": null, "name": "click", - "version": "8.2.1", + "version": "8.2.2", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Composable command line interface toolkit\n# $ click_\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". 
It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", - "release_date": "2025-05-20T23:19:47", + "description": "Composable command line interface toolkit\n
\"\"
\n\n# Click\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", + "release_date": "2025-08-02T02:23:39", "parties": [ { "type": "person", @@ -545,11 +545,11 @@ "Typing :: Typed" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", - "size": 102215, + "download_url": "https://files.pythonhosted.org/packages/ec/85/e7297e34133ae1cfde3bffd30c24e1ef055248251baa877834e048687a28/click-8.2.2-py3-none-any.whl", + "size": 103900, "sha1": null, - "md5": "aeead16d8bed93caa7107ac87b1e5ec8", - "sha256": "61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", + "md5": "7d180e1baded1a50d5ad31b43a965888", + "sha256": "52e1e9f5d3db8c85aa76968c7c67ed41ddbacb167f43201511c8fd61eb5ba2ca", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/pallets/click/", @@ -564,25 +564,25 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/click/8.2.1/json", + "api_data_url": "https://pypi.org/pypi/click/8.2.2/json", "datasource_id": null, - "purl": "pkg:pypi/click@8.2.1" + "purl": "pkg:pypi/click@8.2.2" }, { "type": "pypi", "namespace": null, "name": "cryptography", - "version": "45.0.5", + "version": "46.0.1", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "cryptography is a package which provides cryptographic recipes and primitives to Python developers.\npyca/cryptography\n=================\n\n.. image:: https://img.shields.io/pypi/v/cryptography.svg\n :target: https://pypi.org/project/cryptography/\n :alt: Latest Version\n\n.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest\n :target: https://cryptography.io\n :alt: Latest Docs\n\n.. 
image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main\n :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain\n\n\n``cryptography`` is a package which provides cryptographic recipes and\nprimitives to Python developers. Our goal is for it to be your \"cryptographic\nstandard library\". It supports Python 3.7+ and PyPy3 7.3.11+.\n\n``cryptography`` includes both high level recipes and low level interfaces to\ncommon cryptographic algorithms such as symmetric ciphers, message digests, and\nkey derivation functions. For example, to encrypt something with\n``cryptography``'s high level symmetric encryption recipe:\n\n.. code-block:: pycon\n\n >>> from cryptography.fernet import Fernet\n >>> # Put this somewhere safe!\n >>> key = Fernet.generate_key()\n >>> f = Fernet(key)\n >>> token = f.encrypt(b\"A really secret message. Not for prying eyes.\")\n >>> token\n b'...'\n >>> f.decrypt(token)\n b'A really secret message. Not for prying eyes.'\n\nYou can find more information in the `documentation`_.\n\nYou can install ``cryptography`` with:\n\n.. code-block:: console\n\n $ pip install cryptography\n\nFor full details see `the installation documentation`_.\n\nDiscussion\n~~~~~~~~~~\n\nIf you run into bugs, you can file them in our `issue tracker`_.\n\nWe maintain a `cryptography-dev`_ mailing list for development discussion.\n\nYou can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get\ninvolved.\n\nSecurity\n~~~~~~~~\n\nNeed to report a security issue? Please consult our `security reporting`_\ndocumentation.\n\n\n.. _`documentation`: https://cryptography.io/\n.. _`the installation documentation`: https://cryptography.io/en/latest/installation/\n.. _`issue tracker`: https://github.com/pyca/cryptography/issues\n.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev\n.. _`security reporting`: https://cryptography.io/en/latest/security/", - "release_date": "2025-07-02T13:05:46", + "description": "cryptography is a package which provides cryptographic recipes and primitives to Python developers.\npyca/cryptography\n=================\n\n.. image:: https://img.shields.io/pypi/v/cryptography.svg\n :target: https://pypi.org/project/cryptography/\n :alt: Latest Version\n\n.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest\n :target: https://cryptography.io\n :alt: Latest Docs\n\n.. image:: https://github.com/pyca/cryptography/actions/workflows/ci.yml/badge.svg\n :target: https://github.com/pyca/cryptography/actions/workflows/ci.yml?query=branch%3Amain\n\n``cryptography`` is a package which provides cryptographic recipes and\nprimitives to Python developers. Our goal is for it to be your \"cryptographic\nstandard library\". It supports Python 3.8+ and PyPy3 7.3.11+.\n\n``cryptography`` includes both high level recipes and low level interfaces to\ncommon cryptographic algorithms such as symmetric ciphers, message digests, and\nkey derivation functions. For example, to encrypt something with\n``cryptography``'s high level symmetric encryption recipe:\n\n.. code-block:: pycon\n\n >>> from cryptography.fernet import Fernet\n >>> # Put this somewhere safe!\n >>> key = Fernet.generate_key()\n >>> f = Fernet(key)\n >>> token = f.encrypt(b\"A really secret message. Not for prying eyes.\")\n >>> token\n b'...'\n >>> f.decrypt(token)\n b'A really secret message. 
Not for prying eyes.'\n\nYou can find more information in the `documentation`_.\n\nYou can install ``cryptography`` with:\n\n.. code-block:: console\n\n $ pip install cryptography\n\nFor full details see `the installation documentation`_.\n\nDiscussion\n~~~~~~~~~~\n\nIf you run into bugs, you can file them in our `issue tracker`_.\n\nWe maintain a `cryptography-dev`_ mailing list for development discussion.\n\nYou can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get\ninvolved.\n\nSecurity\n~~~~~~~~\n\nNeed to report a security issue? Please consult our `security reporting`_\ndocumentation.\n\n\n.. _`documentation`: https://cryptography.io/\n.. _`the installation documentation`: https://cryptography.io/en/latest/installation/\n.. _`issue tracker`: https://github.com/pyca/cryptography/issues\n.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev\n.. _`security reporting`: https://cryptography.io/en/latest/security/", + "release_date": "2025-09-17T00:09:47", "parties": [ { "type": "person", "role": "author", - "name": "The cryptography developers ", + "name": null, "email": "The Python Cryptographic Authority and individual contributors ", "url": null } @@ -603,28 +603,27 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: Free Threading :: 3 - Stable", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Security :: Cryptography" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/f2/2e/e2dbd629481b499b14516eed933f3276eb3239f7cee2dcfa4ee6b44d4711/cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", - "size": 4554189, + "download_url": "https://files.pythonhosted.org/packages/a2/67/65dc233c1ddd688073cf7b136b06ff4b84bf517ba5529607c9d79720fc67/cryptography-46.0.1-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", + "size": 4562369, "sha1": null, - "md5": "e60dd7bf09e038a4508efcef2fc28cd5", - "sha256": "7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a", + "md5": "329af3462aa6c5e199ab4681bf0be5ba", + "sha256": "341fb7a26bc9d6093c1b124b9f13acc283d2d51da440b98b55ab3f79f2522ead", "sha512": null, "bug_tracking_url": null, "code_view_url": null, "vcs_url": null, "copyright": null, - "license_expression": null, - "declared_license": { - "license": "Apache-2.0 OR BSD-3-Clause" - }, + "license_expression": "Apache-2.0 OR BSD-3-Clause", + "declared_license": {}, "notice_text": null, "source_packages": [], "file_references": [], @@ -632,9 +631,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/cryptography/45.0.5/json", + "api_data_url": "https://pypi.org/pypi/cryptography/46.0.1/json", "datasource_id": null, - "purl": "pkg:pypi/cryptography@45.0.5" + "purl": "pkg:pypi/cryptography@46.0.1" }, { "type": "pypi", @@ -908,12 +907,12 @@ "type": "pypi", "namespace": null, "name": "pycparser", - "version": "2.22", + "version": "2.23", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "C parser in Python\npycparser is a complete parser of the C language, written in\npure Python using the PLY parsing library.\nIt parses C code into an 
AST and can serve as a front-end for\nC compilers or analysis tools.", - "release_date": "2024-03-30T13:22:20", + "release_date": "2025-09-09T13:23:46", "parties": [ { "type": "person", @@ -936,15 +935,16 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/eliben/pycparser", - "download_url": "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", - "size": 117552, + "download_url": "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", + "size": 118140, "sha1": null, - "md5": "e9bf4a92f270e6482393bd716406ff85", - "sha256": "c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", + "md5": "961daf0e0910747590f8a0101322bcd3", + "sha256": "e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", "sha512": null, "bug_tracking_url": null, "code_view_url": null, @@ -964,9 +964,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/pycparser/2.22/json", + "api_data_url": "https://pypi.org/pypi/pycparser/2.23/json", "datasource_id": null, - "purl": "pkg:pypi/pycparser@2.22" + "purl": "pkg:pypi/pycparser@2.23" }, { "type": "pypi", @@ -1038,12 +1038,12 @@ "type": "pypi", "namespace": null, "name": "requests", - "version": "2.32.4", + "version": "2.32.5", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Python HTTP for Humans.\n# Requests\n\n**Requests** is a simple, yet elegant, HTTP library.\n\n```python\n>>> import requests\n>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))\n>>> r.status_code\n200\n>>> r.headers['content-type']\n'application/json; charset=utf8'\n>>> r.encoding\n'utf-8'\n>>> r.text\n'{\"authenticated\": true, ...'\n>>> r.json()\n{'authenticated': True, ...}\n```\n\nRequests allows you to send HTTP/1.1 requests extremely easily. There\u2019s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data \u2014 but nowadays, just use the `json` method!\n\nRequests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`\u2014 according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. 
You may certainly put your trust in this code.\n\n[![Downloads](https://static.pepy.tech/badge/requests/month)](https://pepy.tech/project/requests)\n[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)\n[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)\n\n## Installing Requests and Supported Versions\n\nRequests is available on PyPI:\n\n```console\n$ python -m pip install requests\n```\n\nRequests officially supports Python 3.8+.\n\n## Supported Features & Best\u2013Practices\n\nRequests is ready for the demands of building robust and reliable HTTP\u2013speaking applications, for the needs of today.\n\n- Keep-Alive & Connection Pooling\n- International Domains and URLs\n- Sessions with Cookie Persistence\n- Browser-style TLS/SSL Verification\n- Basic & Digest Authentication\n- Familiar `dict`\u2013like Cookies\n- Automatic Content Decompression and Decoding\n- Multi-part File Uploads\n- SOCKS Proxy Support\n- Connection Timeouts\n- Streaming Downloads\n- Automatic honoring of `.netrc`\n- Chunked HTTP Requests\n\n## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)\n\n[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io)\n\n## Cloning the repository\n\nWhen cloning the Requests repository, you may need to add the `-c\nfetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see\n[this issue](https://github.com/psf/requests/issues/2690) for more background):\n\n```shell\ngit clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git\n```\n\nYou can also apply this setting to your global Git config:\n\n```shell\ngit config --global fetch.fsck.badTimezone ignore\n```\n\n---\n\n[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf)", - "release_date": "2025-06-09T16:43:05", + "description": "Python HTTP for Humans.\n# Requests\n\n**Requests** is a simple, yet elegant, HTTP library.\n\n```python\n>>> import requests\n>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))\n>>> r.status_code\n200\n>>> r.headers['content-type']\n'application/json; charset=utf8'\n>>> r.encoding\n'utf-8'\n>>> r.text\n'{\"authenticated\": true, ...'\n>>> r.json()\n{'authenticated': True, ...}\n```\n\nRequests allows you to send HTTP/1.1 requests extremely easily. There\u2019s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data \u2014 but nowadays, just use the `json` method!\n\nRequests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`\u2014 according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. 
You may certainly put your trust in this code.\n\n[![Downloads](https://static.pepy.tech/badge/requests/month)](https://pepy.tech/project/requests)\n[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)\n[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)\n\n## Installing Requests and Supported Versions\n\nRequests is available on PyPI:\n\n```console\n$ python -m pip install requests\n```\n\nRequests officially supports Python 3.9+.\n\n## Supported Features & Best\u2013Practices\n\nRequests is ready for the demands of building robust and reliable HTTP\u2013speaking applications, for the needs of today.\n\n- Keep-Alive & Connection Pooling\n- International Domains and URLs\n- Sessions with Cookie Persistence\n- Browser-style TLS/SSL Verification\n- Basic & Digest Authentication\n- Familiar `dict`\u2013like Cookies\n- Automatic Content Decompression and Decoding\n- Multi-part File Uploads\n- SOCKS Proxy Support\n- Connection Timeouts\n- Streaming Downloads\n- Automatic honoring of `.netrc`\n- Chunked HTTP Requests\n\n## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)\n\n[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io)\n\n## Cloning the repository\n\nWhen cloning the Requests repository, you may need to add the `-c\nfetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit timestamp (see\n[this issue](https://github.com/psf/requests/issues/2690) for more background):\n\n```shell\ngit clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git\n```\n\nYou can also apply this setting to your global Git config:\n\n```shell\ngit config --global fetch.fsck.badTimezone ignore\n```\n\n---\n\n[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf)", + "release_date": "2025-08-18T20:46:00", "parties": [ { "type": "person", @@ -1066,7 +1066,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", @@ -1074,11 +1074,11 @@ "Topic :: Software Development :: Libraries" ], "homepage_url": "https://requests.readthedocs.io", - "download_url": "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", - "size": 64847, + "download_url": "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", + "size": 64738, "sha1": null, - "md5": "fa8fa331f951fbc5e62f3d3e683a77a4", - "sha256": "27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", + "md5": "bd126794a95616a0da6192b288f9bb88", + "sha256": "2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/psf/requests", @@ -1098,9 +1098,9 @@ "dependencies": [], "repository_homepage_url": null, 
"repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/requests/2.32.4/json", + "api_data_url": "https://pypi.org/pypi/requests/2.32.5/json", "datasource_id": null, - "purl": "pkg:pypi/requests@2.32.4" + "purl": "pkg:pypi/requests@2.32.5" }, { "type": "pypi", @@ -1162,12 +1162,12 @@ "type": "pypi", "namespace": null, "name": "typing-extensions", - "version": "4.14.1", + "version": "4.15.0", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Backported and Experimental Type Hints for Python 3.9+\n# Typing Extensions\n\n[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)\n\n[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) \u2013\n[PyPI](https://pypi.org/project/typing-extensions/)\n\n## Overview\n\nThe `typing_extensions` module serves two related purposes:\n\n- Enable use of new type system features on older Python versions. For example,\n `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows\n users on previous Python versions to use it too.\n- Enable experimentation with new type system PEPs before they are accepted and\n added to the `typing` module.\n\n`typing_extensions` is treated specially by static type checkers such as\nmypy and pyright. Objects defined in `typing_extensions` are treated the same\nway as equivalent forms in `typing`.\n\n`typing_extensions` uses\n[Semantic Versioning](https://semver.org/). The\nmajor version will be incremented only for backwards-incompatible changes.\nTherefore, it's safe to depend\non `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`,\nwhere `x.y` is the first version that includes all features you need.\n\n## Included items\n\nSee [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a\ncomplete listing of module contents.\n\n## Contributing\n\nSee [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)\nfor how to contribute to `typing_extensions`.", - "release_date": "2025-07-04T13:28:32", + "description": "Backported and Experimental Type Hints for Python 3.9+\n# Typing Extensions\n\n[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)\n\n[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) \u2013\n[PyPI](https://pypi.org/project/typing-extensions/)\n\n## Overview\n\nThe `typing_extensions` module serves two related purposes:\n\n- Enable use of new type system features on older Python versions. For example,\n `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows\n users on previous Python versions to use it too.\n- Enable experimentation with new type system PEPs before they are accepted and\n added to the `typing` module.\n\n`typing_extensions` is treated specially by static type checkers such as\nmypy and pyright. Objects defined in `typing_extensions` are treated the same\nway as equivalent forms in `typing`.\n\n`typing_extensions` uses\n[Semantic Versioning](https://semver.org/). The\nmajor version will be incremented only for backwards-incompatible changes.\nTherefore, it's safe to depend\non `typing_extensions` like this: `typing_extensions ~=x.y`,\nwhere `x.y` is the first version that includes all features you need.\n[This](https://packaging.python.org/en/latest/specifications/version-specifiers/#compatible-release)\nis equivalent to `typing_extensions >=x.y, <(x+1)`. 
Do not depend on `~= x.y.z`\nunless you really know what you're doing; that defeats the purpose of\nsemantic versioning.\n\n## Included items\n\nSee [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a\ncomplete listing of module contents.\n\n## Contributing\n\nSee [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)\nfor how to contribute to `typing_extensions`.", + "release_date": "2025-08-25T13:49:24", "parties": [ { "type": "person", @@ -1205,11 +1205,11 @@ "Topic :: Software Development" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", - "size": 43906, + "download_url": "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", + "size": 44614, "sha1": null, - "md5": "86905389dfed18c11e510c9e23147fcb", - "sha256": "d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", + "md5": "1394f56d85d87540f7907680572797e1", + "sha256": "f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", "sha512": null, "bug_tracking_url": "https://github.com/python/typing_extensions/issues", "code_view_url": null, @@ -1224,9 +1224,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/typing-extensions/4.14.1/json", + "api_data_url": "https://pypi.org/pypi/typing-extensions/4.15.0/json", "datasource_id": null, - "purl": "pkg:pypi/typing-extensions@4.14.1" + "purl": "pkg:pypi/typing-extensions@4.15.0" }, { "type": "pypi", @@ -1306,11 +1306,11 @@ ], "resolved_dependencies_graph": [ { - "package": "pkg:pypi/azure-core@1.35.0", + "package": "pkg:pypi/azure-core@1.35.1", "dependencies": [ - "pkg:pypi/requests@2.32.4", + "pkg:pypi/requests@2.32.5", "pkg:pypi/six@1.17.0", - "pkg:pypi/typing-extensions@4.14.1" + "pkg:pypi/typing-extensions@4.15.0" ] }, { @@ -1320,36 +1320,37 @@ ] }, { - "package": "pkg:pypi/azure-storage-blob@12.25.1", + "package": "pkg:pypi/azure-storage-blob@12.26.0", "dependencies": [ - "pkg:pypi/azure-core@1.35.0", - "pkg:pypi/cryptography@45.0.5", + "pkg:pypi/azure-core@1.35.1", + "pkg:pypi/cryptography@46.0.1", "pkg:pypi/isodate@0.7.2", - "pkg:pypi/typing-extensions@4.14.1" + "pkg:pypi/typing-extensions@4.15.0" ] }, { - "package": "pkg:pypi/certifi@2025.7.14", + "package": "pkg:pypi/certifi@2025.8.3", "dependencies": [] }, { - "package": "pkg:pypi/cffi@1.17.1", + "package": "pkg:pypi/cffi@2.0.0", "dependencies": [ - "pkg:pypi/pycparser@2.22" + "pkg:pypi/pycparser@2.23" ] }, { - "package": "pkg:pypi/charset-normalizer@3.4.2", + "package": "pkg:pypi/charset-normalizer@3.4.3", "dependencies": [] }, { - "package": "pkg:pypi/click@8.2.1", + "package": "pkg:pypi/click@8.2.2", "dependencies": [] }, { - "package": "pkg:pypi/cryptography@45.0.5", + "package": "pkg:pypi/cryptography@46.0.1", "dependencies": [ - "pkg:pypi/cffi@1.17.1" + "pkg:pypi/cffi@2.0.0", + "pkg:pypi/typing-extensions@4.15.0" ] }, { @@ -1363,11 +1364,11 @@ { "package": "pkg:pypi/msrest@0.7.1", "dependencies": [ - "pkg:pypi/azure-core@1.35.0", - "pkg:pypi/certifi@2025.7.14", + "pkg:pypi/azure-core@1.35.1", + "pkg:pypi/certifi@2025.8.3", "pkg:pypi/isodate@0.7.2", "pkg:pypi/requests-oauthlib@2.0.0", - "pkg:pypi/requests@2.32.4" + "pkg:pypi/requests@2.32.5" ] }, { @@ -1375,21 +1376,21 @@ "dependencies": [] }, { - "package": 
"pkg:pypi/pycparser@2.22", + "package": "pkg:pypi/pycparser@2.23", "dependencies": [] }, { "package": "pkg:pypi/requests-oauthlib@2.0.0", "dependencies": [ "pkg:pypi/oauthlib@3.3.1", - "pkg:pypi/requests@2.32.4" + "pkg:pypi/requests@2.32.5" ] }, { - "package": "pkg:pypi/requests@2.32.4", + "package": "pkg:pypi/requests@2.32.5", "dependencies": [ - "pkg:pypi/certifi@2025.7.14", - "pkg:pypi/charset-normalizer@3.4.2", + "pkg:pypi/certifi@2025.8.3", + "pkg:pypi/charset-normalizer@3.4.3", "pkg:pypi/idna@3.10", "pkg:pypi/urllib3@2.5.0" ] @@ -1399,7 +1400,7 @@ "dependencies": [] }, { - "package": "pkg:pypi/typing-extensions@4.14.1", + "package": "pkg:pypi/typing-extensions@4.15.0", "dependencies": [] }, { diff --git a/tests/data/azure-devops.req-312-expected.json b/tests/data/azure-devops.req-312-expected.json index 03e126c8..8952603a 100644 --- a/tests/data/azure-devops.req-312-expected.json +++ b/tests/data/azure-devops.req-312-expected.json @@ -126,12 +126,12 @@ "type": "pypi", "namespace": null, "name": "azure-core", - "version": "1.35.0", + "version": "1.35.1", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Microsoft Azure Core Library for Python\n# Azure Core shared client library for Python\n\nAzure core provides shared exceptions and modules for Python SDK client libraries.\nThese libraries follow the [Azure SDK Design Guidelines for Python](https://azure.github.io/azure-sdk/python/guidelines/index.html) .\n\nIf you are a client library developer, please reference [client library developer reference](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md) for more information.\n\n[Source code](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/)\n| [Package (Pypi)][package]\n| [Package (Conda)](https://anaconda.org/microsoft/azure-core/)\n| [API reference documentation](https://learn.microsoft.com/python/api/overview/azure/core-readme)\n\n## Getting started\n\nTypically, you will not need to install azure core;\nit will be installed when you install one of the client libraries using it.\nIn case you want to install it explicitly (to implement your own client library, for example),\nyou can find it [here](https://pypi.org/project/azure-core/).\n\n## Key concepts\n\n### Azure Core Library Exceptions\n\n#### AzureError\n\nAzureError is the base exception for all errors.\n\n```python\nclass AzureError(Exception):\n def __init__(self, message, *args, **kwargs):\n self.inner_exception = kwargs.get(\"error\")\n self.exc_type, self.exc_value, self.exc_traceback = sys.exc_info()\n self.exc_type = self.exc_type.__name__ if self.exc_type else type(self.inner_exception)\n self.exc_msg = \"{}, {}: {}\".format(message, self.exc_type, self.exc_value) # type: ignore\n self.message = str(message)\n self.continuation_token = kwargs.get(\"continuation_token\")\n super(AzureError, self).__init__(self.message, *args)\n```\n\n*message* is any message (str) to be associated with the exception.\n\n*args* are any additional args to be included with exception.\n\n*kwargs* are keyword arguments to include with the exception. Use the keyword *error* to pass in an internal exception and *continuation_token* for a token reference to continue an incomplete operation.\n\n**The following exceptions inherit from AzureError:**\n\n#### ServiceRequestError\n\nAn error occurred while attempt to make a request to the service. 
No request was sent.\n\n#### ServiceResponseError\n\nThe request was sent, but the client failed to understand the response.\nThe connection may have timed out. These errors can be retried for idempotent or safe operations.\n\n#### HttpResponseError\n\nA request was made, and a non-success status code was received from the service.\n\n```python\nclass HttpResponseError(AzureError):\n def __init__(self, message=None, response=None, **kwargs):\n self.reason = None\n self.response = response\n if response:\n self.reason = response.reason\n self.status_code = response.status_code\n self.error = self._parse_odata_body(ODataV4Format, response) # type: Optional[ODataV4Format]\n if self.error:\n message = str(self.error)\n else:\n message = message or \"Operation returned an invalid status '{}'\".format(\n self.reason\n )\n\n super(HttpResponseError, self).__init__(message=message, **kwargs)\n```\n\n*message* is the HTTP response error message (optional)\n\n*response* is the HTTP response (optional).\n\n*kwargs* are keyword arguments to include with the exception.\n\n**The following exceptions inherit from HttpResponseError:**\n\n#### DecodeError\n\nAn error raised during response de-serialization.\n\n#### IncompleteReadError\n\nAn error raised if peer closes the connection before we have received the complete message body.\n\n#### ResourceExistsError\n\nAn error response with status code 4xx. This will not be raised directly by the Azure core pipeline.\n\n#### ResourceNotFoundError\n\nAn error response, typically triggered by a 412 response (for update) or 404 (for get/post).\n\n#### ResourceModifiedError\n\nAn error response with status code 4xx, typically 412 Conflict. This will not be raised directly by the Azure core pipeline.\n\n#### ResourceNotModifiedError\n\nAn error response with status code 304. This will not be raised directly by the Azure core pipeline.\n\n#### ClientAuthenticationError\n\nAn error response with status code 4xx. This will not be raised directly by the Azure core pipeline.\n\n#### TooManyRedirectsError\n\nAn error raised when the maximum number of redirect attempts is reached. The maximum amount of redirects can be configured in the RedirectPolicy.\n\n```python\nclass TooManyRedirectsError(HttpResponseError):\n def __init__(self, history, *args, **kwargs):\n self.history = history\n message = \"Reached maximum redirect attempts.\"\n super(TooManyRedirectsError, self).__init__(message, *args, **kwargs)\n```\n\n*history* is used to document the requests/responses that resulted in redirected requests.\n\n*args* are any additional args to be included with exception.\n\n*kwargs* are keyword arguments to include with the exception.\n\n#### StreamConsumedError\n\nAn error thrown if you try to access the stream of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once\nthe response stream has been consumed.\n\n#### StreamClosedError\n\nAn error thrown if you try to access the stream of the `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once\nthe response stream has been closed.\n\n#### ResponseNotReadError\n\nAn error thrown if you try to access the `content` of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` before\nreading in the response's bytes first.\n\n### Configurations\n\nWhen calling the methods, some properties can be configured by passing in as kwargs arguments.\n\n| Parameters | Description |\n| --- | --- |\n| headers | The HTTP Request headers. 
|\n| request_id | The request id to be added into header. |\n| user_agent | If specified, this will be added in front of the user agent string. |\n| logging_enable| Use to enable per operation. Defaults to `False`. |\n| logger | If specified, it will be used to log information. |\n| response_encoding | The encoding to use if known for this service (will disable auto-detection). |\n| raw_request_hook | Callback function. Will be invoked on request. |\n| raw_response_hook | Callback function. Will be invoked on response. |\n| network_span_namer | A callable to customize the span name. |\n| tracing_attributes | Attributes to set on all created spans. |\n| permit_redirects | Whether the client allows redirects. Defaults to `True`. |\n| redirect_max | The maximum allowed redirects. Defaults to `30`. |\n| retry_total | Total number of retries to allow. Takes precedence over other counts. Default value is `10`. |\n| retry_connect | How many connection-related errors to retry on. These are errors raised before the request is sent to the remote server, which we assume has not triggered the server to process the request. Default value is `3`. |\n| retry_read | How many times to retry on read errors. These errors are raised after the request was sent to the server, so the request may have side-effects. Default value is `3`. |\n| retry_status | How many times to retry on bad status codes. Default value is `3`. |\n| retry_backoff_factor | A backoff factor to apply between attempts after the second try (most errors are resolved immediately by a second try without a delay). Retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))` seconds. If the backoff_factor is 0.1, then the retry will sleep for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is `0.8`. |\n| retry_backoff_max | The maximum back off time. Default value is `120` seconds (2 minutes). |\n| retry_mode | Fixed or exponential delay between attempts, default is `Exponential`. |\n| timeout | Timeout setting for the operation in seconds, default is `604800`s (7 days). |\n| connection_timeout | A single float in seconds for the connection timeout. Defaults to `300` seconds. |\n| read_timeout | A single float in seconds for the read timeout. Defaults to `300` seconds. |\n| connection_verify | SSL certificate verification. Enabled by default. Set to False to disable, alternatively can be set to the path to a CA_BUNDLE file or directory with certificates of trusted CAs. |\n| connection_cert | Client-side certificates. You can specify a local cert to use as client side certificate, as a single file (containing the private key and the certificate) or as a tuple of both files' paths. |\n| proxies | Dictionary mapping protocol or protocol and hostname to the URL of the proxy. |\n| cookies | Dict or CookieJar object to send with the `Request`. |\n| connection_data_block_size | The block size of data sent over the connection. Defaults to `4096` bytes. |\n\n### Async transport\n\nThe async transport is designed to be opt-in. [AioHttp](https://pypi.org/project/aiohttp/) is one of the supported implementations of async transport. It is not installed by default. You need to install it separately.\n\n### Shared modules\n\n#### MatchConditions\n\nMatchConditions is an enum to describe match conditions.\n\n```python\nclass MatchConditions(Enum):\n Unconditionally = 1 # Matches any condition\n IfNotModified = 2 # If the target object is not modified. 
Usually it maps to etag=\n IfModified = 3 # Only if the target object is modified. Usually it maps to etag!=\n IfPresent = 4 # If the target object exists. Usually it maps to etag='*'\n IfMissing = 5 # If the target object does not exist. Usually it maps to etag!='*'\n```\n\n#### CaseInsensitiveEnumMeta\n\nA metaclass to support case-insensitive enums.\n\n```python\nfrom enum import Enum\n\nfrom azure.core import CaseInsensitiveEnumMeta\n\nclass MyCustomEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):\n FOO = 'foo'\n BAR = 'bar'\n```\n\n#### Null Sentinel Value\n\nA falsy sentinel object which is supposed to be used to specify attributes\nwith no data. This gets serialized to `null` on the wire.\n\n```python\nfrom azure.core.serialization import NULL\n\nassert bool(NULL) is False\n\nfoo = Foo(\n attr=NULL\n)\n```\n\n## Contributing\n\nThis project welcomes contributions and suggestions. Most contributions require\nyou to agree to a Contributor License Agreement (CLA) declaring that you have\nthe right to, and actually do, grant us the rights to use your contribution.\nFor details, visit [https://cla.microsoft.com](https://cla.microsoft.com).\n\nWhen you submit a pull request, a CLA-bot will automatically determine whether\nyou need to provide a CLA and decorate the PR appropriately (e.g., label,\ncomment). Simply follow the instructions provided by the bot. You will only\nneed to do this once across all repos using our CLA.\n\nThis project has adopted the\n[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).\nFor more information, see the\n[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)\nor contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any\nadditional questions or comments.\n\n\n[package]: https://pypi.org/project/azure-core/\n\n\n# Release History\n\n## 1.35.0 (2025-07-02)\n\n### Features Added\n\n- Added a `start_time` keyword argument to the `start_span` and `start_as_current_span` methods in the `OpenTelemetryTracer` class. This allows users to specify a custom start time for created spans. #41106\n- Added a `context` keyword argument to the `start_span` and `start_as_current_span` methods in the `OpenTelemetryTracer` class. This allows users to specify a custom parent context for created spans. #41511\n- Added method `as_attribute_dict` to `azure.core.serialization` for backcompat migration purposes. Will return a generated model as a dictionary where the keys are in attribute syntax.\n- Added `is_generated_model` method to `azure.core.serialization`. Returns whether a given input is a model from one of our generated sdks. #41445\n- Added `attribute_list` method to `azure.core.serialization`. Returns all of the attributes of a given model from one of our generated sdks. #41571\n\n### Other Changes\n\n- A timeout error when using the `aiohttp` transport (the default for async SDKs) will now be raised as a `azure.core.exceptions.ServiceResponseTimeoutError`, a subtype of the previously raised `ServiceResponseError`.\n- When using with `aiohttp` 3.10 or later, a connection timeout error will now be raised as a `azure.core.exceptions.ServiceRequestTimeoutError`, which can be retried.\n- The default implementation of `on_challenge` in `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` will now cache the retrieved token. #41857\n\n## 1.34.0 (2025-05-01)\n\n### Features Added\n\n- Added a `set_span_error_status` method to the `OpenTelemetryTracer` class. 
This method allows users to set the status of a span to `ERROR` after it has been created. #40703\n\n### Other Changes\n\n- Python 3.8 is no longer supported. Please use Python version 3.9 or later.\n\n## 1.33.0 (2025-04-03)\n\n### Features Added\n\n- Added native OpenTelemetry tracing to Azure Core which enables users to use OpenTelemetry to trace Azure SDK operations without needing to install a plugin. #39563\n - To enable native OpenTelemetry tracing, users need to:\n 1. Have `opentelemetry-api` installed.\n 2. Ensure that `settings.tracing_implementation` is not set.\n 3. Ensure that `settings.tracing_enabled` is set to `True`.\n - If `setting.tracing_implementation` is set, the tracing plugin will be used instead of the native tracing.\n - If `settings.tracing_enabled` is set to `False`, tracing will be disabled.\n - The `OpenTelemetryTracer` class was added to the `azure.core.tracing.opentelemetry` module. This is a wrapper around the OpenTelemetry tracer that is used to create spans for Azure SDK operations.\n - Added a `get_tracer` method to the new `azure.core.instrumentation` module. This method returns an instance of the `OpenTelemetryTracer` class if OpenTelemetry is available.\n - A `TracingOptions` TypedDict class was added to define the options that SDK users can use to configure tracing per-operation. These options include the ability to enable or disable tracing and set additional attributes on spans.\n - Example usage: `client.method(tracing_options={\"enabled\": True, \"attributes\": {\"foo\": \"bar\"}})`\n - The `DistributedTracingPolicy` and `distributed_trace`/`distributed_trace_async` decorators now uses the OpenTelemetry tracer if it is available and native tracing is enabled.\n - SDK clients can define an `_instrumentation_config` class variable to configure the OpenTelemetry tracer used in method span creation. Possible configuration options are `library_name`, `library_version`, `schema_url`, and `attributes`.\n - `DistributedTracingPolicy` now accepts a `instrumentation_config` keyword argument to configure the OpenTelemetry tracer used in HTTP span creation.\n\n### Breaking Changes\n\n- Removed automatic tracing enablement for the OpenTelemetry plugin if `opentelemetry` was imported. To enable tracing with the plugin, please import `azure.core.settings.settings` and set `settings.tracing_implementation` to `\"opentelemetry\"`. #39563\n- In `DistributedTracingPolicy`, the default span name is now just the HTTP method (e.g., \"GET\", \"POST\") and no longer includes the URL path. This change was made to converge with the OpenTelemetry HTTP semantic conventions. The full URL is still included in the span attributes.\n- Renamed span attributes in `DistributedTracingPolicy`:\n - \"x-ms-client-request-id\" is now \"az.client_request_id\"\n - \"x-ms-request-id\" is now \"az.service_request_id\"\n\n### Bugs Fixed\n\n- Fixed an issue where the `traceparent` header was not being set correctly in the `DistributedTracingPolicy`. The `traceparent` header will now set based on the context of the HTTP client span. #40074\n\n### Other Changes\n\n- Added `opentelemetry-api` as an optional dependency for tracing. This can be installed with `pip install azure-core[tracing]`. 
#39563\n\n## 1.32.0 (2024-10-31)\n\n### Features Added\n\n- Added a default implementation to handle token challenges in `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy`.\n\n### Bugs Fixed\n\n- Fixed an issue where the `tracing_attributes` keyword argument wasn't being handled at the request/method level. #38164\n\n### Other Changes\n\n- Log \"x-vss-e2eid\" and \"x-msedge-ref\" headers in `HttpLoggingPolicy`.\n\n## 1.31.0 (2024-09-12)\n\n### Features Added\n\n- Added azure.core.AzureClouds enum to represent the different Azure clouds.\n- Added two new credential protocol classes, `SupportsTokenInfo` and `AsyncSupportsTokenInfo`, to offer more extensibility in supporting various token acquisition scenarios. #36565\n - Each new protocol class defines a `get_token_info` method that returns an `AccessTokenInfo` object.\n- Added a new `TokenRequestOptions` class, which is a `TypedDict` with optional parameters, that can be used to define options for token requests through the `get_token_info` method. #36565\n- Added a new `AccessTokenInfo` class, which is returned by `get_token_info` implementations. This class contains the token, its expiration time, and optional additional information like when a token should be refreshed. #36565\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` now first check if a credential has the `get_token_info` method defined. If so, the `get_token_info` method is used to acquire a token. Otherwise, the `get_token` method is used. #36565\n - These policies now also check the `refresh_on` attribute when determining if a new token request should be made.\n\n### Other Changes\n\n- The Azure Core OpenTelemetry tracing plugin will now be the preferred tracing plugin over the OpenCensus plugin. If both plugins are installed and `opentelemetry` is imported, then OpenTelemetry will be used to trace Azure SDK operations. #35050\n\n## 1.30.2 (2024-06-06)\n\n### Features Added\n\n- Tracing: `DistributedTracingPolicy` will now set an attribute, `http.request.resend_count`, on HTTP spans for resent requests to indicate the resend attempt number. #35069\n\n### Bugs Fixed\n\n- Raise correct exception if transport is used while already closed #35559\n\n### Other Changes\n\n- HTTP tracing spans will now include an `error.type` attribute if an error status code is returned. #34619\n- Minimum required Python version is now 3.8\n\n## 1.30.1 (2024-02-29)\n\n### Other Changes\n\n- Accept float for `retry_after` header. #34203\n\n## 1.30.0 (2024-02-01)\n\n### Features Added\n\n- Support tuple input for file values to `azure.core.rest.HttpRequest` #33948\n- Support tuple input to `files` with duplicate field names `azure.core.rest.HttpRequest` #34021\n\n## 1.29.7 (2024-01-18)\n\n### Other Changes\n\n- Removed dependency on `anyio`. #33282\n\n## 1.29.6 (2023-12-14)\n\n### Bugs Fixed\n\n- Adjusted `AsyncBearerTokenCredentialPolicy` to work properly with `trio` concurrency mechanisms. ([#33307](https://github.com/Azure/azure-sdk-for-python/pull/33307))\n\n### Other Changes\n\n- Added dependency on `anyio` >=3.0,<5.0\n- Bumped minimum dependency on `requests` to 2.21.0.\n\n## 1.29.5 (2023-10-19)\n\n### Bugs Fixed\n\n- Fixed an issue with `multipart/form-data` in the async transport where `data` was not getting encoded into the request body. #32473\n\n### Other Changes\n\n- Use ssl context from aiohttp by default.\n\n## 1.29.4 (2023-09-07)\n\n### Bugs Fixed\n\n- Fixed the issue that some urls trigger an infinite loop. 
#31346\n- Fixed issue where IndexError was raised if multipart responses did not match the number of requests. #31471\n- Fixed issue unbound variable exception if dict is invalid in CloudEvent.from_dict. #31835\n- Fixed issue asyncBearerTokenCredentialPolicy is not backward compatible with SansIOHTTPPolicy. #31836\n- Fixed issue mypy complains with new version of azure-core. #31564\n\n## 1.29.3 (2023-08-22)\n\n### Bugs Fixed\n\n- Typing fix: `message` cannot be `None` in `AzureError`. #31564\n\n## 1.29.2 (2023-08-14)\n\n### Bugs Fixed\n\n- Added a default implementation for `AsyncTokenCredential.__aexit__()` #31573\n\n### Other Changes\n\n- Bumped `typing-extensions` version to 4.6.0.\n\n## 1.29.1 (2023-08-09)\n\n### Bugs Fixed\n\n- Not pass `enabled_cae` unless it is explicitly enabled.\n\n## 1.29.0 (2023-08-03)\n\n### Features Added\n\n- A keyword argument `enable_cae` was added to the `get_token` method of the `TokenCredential` protocol. #31012\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` now accept `enable_cae` keyword arguments in their constructors. This is used in determining if [Continuous Access Evaluation (CAE)](https://learn.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation) should be enabled for each `get_token` request. #31012\n\n## 1.28.0 (2023-07-06)\n\n### Features Added\n\n- Added header name parameter to `RequestIdPolicy`. #30772\n- Added `SensitiveHeaderCleanupPolicy` that cleans up sensitive headers if a redirect happens and the new destination is in another domain. #28349\n\n### Other Changes\n\n- Catch aiohttp errors and translate them into azure-core errors.\n\n## 1.27.1 (2023-06-13)\n\n### Bugs Fixed\n\n- Fix url building for some complex query parameters scenarios #30707\n\n## 1.27.0 (2023-06-01)\n\n### Features Added\n\n- Added support to use sync credentials in `AsyncBearerTokenCredentialPolicy`. #30381\n- Added \"prefix\" parameter to AzureKeyCredentialPolicy #29901\n\n### Bugs Fixed\n\n- Improve error message when providing the wrong credential type for AzureKeyCredential #30380\n\n## 1.26.4 (2023-04-06)\n\n### Features Added\n\n- Updated settings to include OpenTelemetry as a tracer provider. #29095\n\n### Other Changes\n\n- Improved typing\n\n## 1.26.3 (2023-02-02)\n\n### Bugs Fixed\n\n- Fixed deflate decompression for aiohttp #28483\n\n## 1.26.2 (2023-01-05)\n\n### Bugs Fixed\n\n- Fix 'ClientSession' object has no attribute 'auto_decompress' (thanks to @mghextreme for the contribution)\n\n### Other Changes\n\n- Add \"x-ms-error-code\" as secure header to log\n- Rename \"DEFAULT_HEADERS_WHITELIST\" to \"DEFAULT_HEADERS_ALLOWLIST\". Added a backward compatible alias.\n\n## 1.26.1 (2022-11-03)\n\n### Other Changes\n\n- Added example of RequestsTransport with custom session. (thanks to @inirudebwoy for the contribution) #26768\n- Added Python 3.11 support.\n\n## 1.26.0 (2022-10-06)\n\n### Other Changes\n\n- LRO polling will not wait anymore before doing the first status check #26376\n- Added extra dependency for [aio]. pip install azure-core[aio] installs aiohttp too.\n\n## 1.25.1 (2022-09-01)\n\n### Bugs Fixed\n\n- Added @runtime_checkable to `TokenCredential` protocol definitions #25187\n\n## 1.25.0 (2022-08-04)\n\nAzure-core is supported on Python 3.7 or later. 
For more details, please read our page on [Azure SDK for Python version support policy](https://github.com/Azure/azure-sdk-for-python/wiki/Azure-SDKs-Python-version-support-policy).\n\n### Features Added\n\n- Added `CaseInsensitiveDict` implementation in `azure.core.utils` removing dependency on `requests` and `aiohttp`\n\n## 1.24.2 (2022-06-30)\n\n### Bugs Fixed\n\n- Fixed the bug that azure-core could not be imported under Python 3.11.0b3 #24928\n- `ContentDecodePolicy` can now correctly deserialize more JSON bodies with different mime types #22410\n\n## 1.24.1 (2022-06-01)\n\n### Bugs Fixed\n\n- Declare method level span as INTERNAL by default #24492\n- Fixed type hints for `azure.core.paging.ItemPaged` #24548\n\n## 1.24.0 (2022-05-06)\n\n### Features Added\n\n- Add `SerializationError` and `DeserializationError` in `azure.core.exceptions` for errors raised during serialization / deserialization #24312\n\n## 1.23.1 (2022-03-31)\n\n### Bugs Fixed\n\n- Allow stream inputs to the `content` kwarg of `azure.core.rest.HttpRequest` from objects with a `read` method #23578\n\n## 1.23.0 (2022-03-03)\n\n### Features Added\n\n- Improve intellisense type hinting for service client methods. #22891\n\n- Add a case insensitive dict `case_insensitive_dict` in `azure.core.utils`. #23206\n\n### Bugs Fixed\n\n- Use \"\\n\" rather than \"/n\" for new line in log. #23261\n\n### Other Changes\n\n- Log \"WWW-Authenticate\" header in `HttpLoggingPolicy` #22990\n- Added dependency on `typing-extensions` >= 4.0.1\n\n## 1.22.1 (2022-02-09)\n\n### Bugs Fixed\n\n- Limiting `final-state-via` scope to POST until consuming SDKs has been fixed to use this option properly on PUT. #22989\n\n## 1.22.0 (2022-02-03)\n_[**This version is deprecated.**]_\n\n### Features Added\n\n- Add support for `final-state-via` LRO option in core. #22713\n\n### Bugs Fixed\n\n- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #22302\n- Raise `AttributeError` when calling azure.core.pipeline.transport.\\_\\_bases__ #22469\n\n### Other Changes\n\n- Python 2.7 is no longer supported. Please use Python version 3.6 or later.\n\n## 1.21.1 (2021-12-06)\n\n### Other Changes\n\n- Revert change in str method #22023\n\n## 1.21.0 (2021-12-02)\n\n### Breaking Changes\n\n- Sync stream downloading now raises `azure.core.exceptions.DecodeError` rather than `requests.exceptions.ContentDecodingError`\n\n### Bugs Fixed\n\n- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #21800\n\n## 1.20.1 (2021-11-08)\n\n### Bugs Fixed\n\n- Correctly set response's content to decompressed body when users are using aiohttp transport with decompression headers #21620\n\n## 1.20.0 (2021-11-04)\n\n### Features Added\n\n- GA `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in\nrequests and sends them through our pipelines.\n- GA `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.\n- GA errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. 
These errors\nare thrown if you mishandle streamed responses from the `azure.core.rest` module\n- add kwargs to the methods for `iter_raw` and `iter_bytes` #21529\n- no longer raise JSON errors if users pass in file descriptors of JSON to the `json` kwarg in `HttpRequest` #21504\n- Added new error type `IncompleteReadError` which is raised if peer closes the connection before we have received the complete message body.\n\n### Breaking Changes\n\n- SansIOHTTPPolicy.on_exception returns None instead of bool.\n\n### Bugs Fixed\n\n- The `Content-Length` header in a http response is strictly checked against the actual number of bytes in the body,\n rather than silently truncating data in case the underlying tcp connection is closed prematurely.\n (thanks to @jochen-ott-by for the contribution) #20412\n- UnboundLocalError when SansIOHTTPPolicy handles an exception #15222\n- Add default content type header of `text/plain` and content length header for users who pass unicode strings to the `content` kwarg of `HttpRequest` in 2.7 #21550\n\n## 1.19.1 (2021-11-01)\n\n### Bugs Fixed\n\n- respect text encoding specified in argument (thanks to @ryohji for the contribution) #20796\n- Fix \"coroutine x.read() was never awaited\" warning from `ContentDecodePolicy` #21318\n- fix type check for `data` input to `azure.core.rest` for python 2.7 users #21341\n- use `charset_normalizer` if `chardet` is not installed to migrate aiohttp 3.8.0 changes.\n\n### Other Changes\n\n- Refactor AzureJSONEncoder (thanks to @Codejune for the contribution) #21028\n\n## 1.19.0 (2021-09-30)\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` are now abstract base classes. They should not be initialized directly, instead\nyour transport responses should inherit from them and implement them.\n- The properties of the `azure.core.rest` responses are now all read-only\n\n- HttpLoggingPolicy integrates logs into one record #19925\n\n## 1.18.0 (2021-09-02)\n\n### Features Added\n\n- `azure.core.serialization.AzureJSONEncoder` (introduced in 1.17.0) serializes `datetime.datetime` objects in ISO 8601 format, conforming to RFC 3339's specification. #20190\n- We now use `azure.core.serialization.AzureJSONEncoder` to serialize `json` input to `azure.core.rest.HttpRequest`.\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- The `text` property on `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` has changed to a method, which also takes\nan `encoding` parameter.\n- Removed `iter_text` and `iter_lines` from `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse`\n\n### Bugs Fixed\n\n- The behaviour of the headers returned in `azure.core.rest` responses now aligns across sync and async. Items can now be checked case-insensitively and without raising an error for format.\n\n## 1.17.0 (2021-08-05)\n\n### Features Added\n\n- Cut hard dependency on requests library\n- Added a `from_json` method which now accepts storage QueueMessage, eventhub's EventData or ServiceBusMessage or simply json bytes to return a `CloudEvent`\n\n### Fixed\n\n- Not override \"x-ms-client-request-id\" if it already exists in the header. #17757\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- `azure.core.rest` will not try to guess the `charset` anymore if it was impossible to extract it from `HttpResponse` analysis. 
This removes our dependency on `charset`.\n\n## 1.16.0 (2021-07-01)\n\n### Features Added\n\n- Add new ***provisional*** methods `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in\nrequests and sends them through our pipelines.\n- Add new ***provisional*** module `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.\n- Add new ***provisional*** errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. These errors\nare thrown if you mishandle streamed responses from the provisional `azure.core.rest` module\n\n### Fixed\n\n- Improved error message in the `from_dict` method of `CloudEvent` when a wrong schema is sent.\n\n## 1.15.0 (2021-06-04)\n\n### New Features\n\n- Added `BearerTokenCredentialPolicy.on_challenge` and `.authorize_request` to allow subclasses to optionally handle authentication challenges\n\n### Bug Fixes\n\n- Retry policies don't sleep after operations time out\n- The `from_dict` methhod in the `CloudEvent` can now convert a datetime string to datetime object when microsecond exceeds the python limitation\n\n## 1.14.0 (2021-05-13)\n\n### New Features\n\n- Added `azure.core.credentials.AzureNamedKeyCredential` credential #17548.\n- Added `decompress` parameter for `stream_download` method. If it is set to `False`, will not do decompression upon the stream. #17920\n\n## 1.13.0 (2021-04-02)\n\nAzure core requires Python 2.7 or Python 3.6+ since this release.\n\n### New Features\n\n- Added `azure.core.utils.parse_connection_string` function to parse connection strings across SDKs, with common validation and support for case insensitive keys.\n- Supported adding custom policies #16519\n- Added `~azure.core.tracing.Link` that should be used while passing `Links` to `AbstractSpan`.\n- `AbstractSpan` constructor can now take in additional keyword only args.\n\n### Bug fixes\n\n- Make NetworkTraceLoggingPolicy show the auth token in plain text. #14191\n- Fixed RetryPolicy overriding default connection timeout with an extreme value #17481\n\n## 1.12.0 (2021-03-08)\n\nThis version will be the last version to officially support Python 3.5, future versions will require Python 2.7 or Python 3.6+.\n\n### Features\n\n- Added `azure.core.messaging.CloudEvent` model that follows the cloud event spec.\n- Added `azure.core.serialization.NULL` sentinel value\n- Improve `repr`s for `HttpRequest` and `HttpResponse`s #16972\n\n### Bug Fixes\n\n- Disable retry in stream downloading. (thanks to @jochen-ott-by @hoffmann for the contribution) #16723\n\n## 1.11.0 (2021-02-08)\n\n### Features\n\n- Added `CaseInsensitiveEnumMeta` class for case-insensitive enums. #16316\n- Add `raise_for_status` method onto `HttpResponse`. Calling `response.raise_for_status()` on a response with an error code\nwill raise an `HttpResponseError`. Calling it on a good response will do nothing #16399\n\n### Bug Fixes\n\n- Update conn.conn_kw rather than overriding it when setting block size. (thanks for @jiasli for the contribution) #16587\n\n## 1.10.0 (2021-01-11)\n\n### Features\n\n- Added `AzureSasCredential` and its respective policy. 
#15946\n\n## 1.9.0 (2020-11-09)\n\n### Features\n\n- Add a `continuation_token` attribute to the base `AzureError` exception, and set this value for errors raised\n during paged or long-running operations.\n\n### Bug Fixes\n\n- Set retry_interval to 1 second instead of 1000 seconds (thanks **vbarbaresi** for contributing) #14357\n\n\n## 1.8.2 (2020-10-05)\n\n### Bug Fixes\n\n- Fixed bug to allow polling in the case of parameterized endpoints with relative polling urls #14097\n\n\n## 1.8.1 (2020-09-08)\n\n### Bug fixes\n\n- SAS credential replicated \"/\" fix #13159\n\n## 1.8.0 (2020-08-10)\n\n### Features\n\n- Support params as list for exploding parameters #12410\n\n\n## 1.7.0 (2020-07-06)\n\n### Bug fixes\n\n- `AzureKeyCredentialPolicy` will now accept (and ignore) passed in kwargs #11963\n- Better error messages if passed endpoint is incorrect #12106\n- Do not JSON encore a string if content type is \"text\" #12137\n\n### Features\n\n- Added `http_logging_policy` property on the `Configuration` object, allowing users to individually\nset the http logging policy of the config #12218\n\n## 1.6.0 (2020-06-03)\n\n### Bug fixes\n\n- Fixed deadlocks in AsyncBearerTokenCredentialPolicy #11543\n- Fix AttributeException in StreamDownloadGenerator #11462\n\n### Features\n\n- Added support for changesets as part of multipart message support #10485\n- Add AsyncLROPoller in azure.core.polling #10801\n- Add get_continuation_token/from_continuation_token/polling_method methods in pollers (sync and async) #10801\n- HttpResponse and PipelineContext objects are now pickable #10801\n\n## 1.5.0 (2020-05-04)\n\n### Features\n\n- Support \"x-ms-retry-after-ms\" in response header #10743\n- `link` and `link_from_headers` now accepts attributes #10765\n\n### Bug fixes\n\n- Not retry if the status code is less than 400 #10778\n- \"x-ms-request-id\" is not considered safe header for logging #10967\n\n## 1.4.0 (2020-04-06)\n\n### Features\n\n- Support a default error type in map_error #9773\n- Added `AzureKeyCredential` and its respective policy. 
#10509\n- Added `azure.core.polling.base_polling` module with a \"Microsoft One API\" polling implementation #10090\n Also contains the async version in `azure.core.polling.async_base_polling`\n- Support kwarg `enforce_https` to disable HTTPS check on authentication #9821\n- Support additional kwargs in `HttpRequest.set_multipart_mixed` that will be passed into pipeline context.\n\n## 1.3.0 (2020-03-09)\n\n### Bug fixes\n\n- Appended RequestIdPolicy to the default pipeline #9841\n- Rewind the body position in async_retry #10117\n\n### Features\n\n- Add raw_request_hook support in custom_hook_policy #9958\n- Add timeout support in retry_policy #10011\n- Add OdataV4 error format auto-parsing in all exceptions ('error' attribute) #9738\n\n## 1.2.2 (2020-02-10)\n\n### Bug fixes\n\n- Fixed a bug that sends None as request_id #9545\n- Enable mypy for customers #9572\n- Handle TypeError in deep copy #9620\n- Fix text/plain content-type in decoder #9589\n\n## 1.2.1 (2020-01-14)\n\n### Bug fixes\n\n- Fixed a regression in 1.2.0 that was incompatible with azure-keyvault-* 4.0.0\n[#9462](https://github.com/Azure/azure-sdk-for-python/issues/9462)\n\n\n## 1.2.0 (2020-01-14)\n\n### Features\n\n- Add user_agent & sdk_moniker kwargs in UserAgentPolicy init #9355\n- Support OPTIONS HTTP verb #9322\n- Add tracing_attributes to tracing decorator #9297\n- Support auto_request_id in RequestIdPolicy #9163\n- Support fixed retry #6419\n- Support \"retry-after-ms\" in response header #9240\n\n### Bug fixes\n\n- Removed `__enter__` and `__exit__` from async context managers #9313\n\n## 1.1.1 (2019-12-03)\n\n### Bug fixes\n\n- Bearer token authorization requires HTTPS\n- Rewind the body position in retry #8307\n\n## 1.1.0 (2019-11-25)\n\n### Features\n\n- New RequestIdPolicy #8437\n- Enable logging policy in default pipeline #8053\n- Normalize transport timeout. #8000\n Now we have:\n * 'connection_timeout' - a single float in seconds for the connection timeout. Default 5min\n * 'read_timeout' - a single float in seconds for the read timeout. Default 5min\n\n### Bug fixes\n\n- RequestHistory: deepcopy fails if request contains a stream #7732\n- Retry: retry raises error if response does not have http_response #8629\n- Client kwargs are now passed to DistributedTracingPolicy correctly #8051\n- NetworkLoggingPolicy now logs correctly all requests in case of retry #8262\n\n## 1.0.0 (2019-10-29)\n\n### Features\n\n- Tracing: DistributedTracingPolicy now accepts kwargs network_span_namer to change network span name #7773\n- Tracing: Implementation of AbstractSpan can now use the mixin HttpSpanMixin to get HTTP span update automatically #7773\n- Tracing: AbstractSpan contract \"change_context\" introduced #7773\n- Introduce new policy HttpLoggingPolicy #7988\n\n### Bug fixes\n\n- Fix AsyncioRequestsTransport if input stream is an async generator #7743\n- Fix form-data with aiohttp transport #7749\n\n### Breaking changes\n\n- Tracing: AbstractSpan.set_current_span is longer supported. Use change_context instead. 
#7773\n- azure.core.pipeline.policies.ContentDecodePolicy.deserialize_from_text changed\n\n## 1.0.0b4 (2019-10-07)\n\n### Features\n\n- Tracing: network span context is available with the TRACING_CONTEXT in pipeline response #7252\n- Tracing: Span contract now has `kind`, `traceparent` and is a context manager #7252\n- SansIOHTTPPolicy methods can now be coroutines #7497\n- Add multipart/mixed support #7083:\n\n - HttpRequest now has a \"set_multipart_mixed\" method to set the parts of this request\n - HttpRequest now has a \"prepare_multipart_body\" method to build final body.\n - HttpResponse now has a \"parts\" method to return an iterator of parts\n - AsyncHttpResponse now has a \"parts\" methods to return an async iterator of parts\n - Note that multipart/mixed is a Python 3.x only feature\n\n### Bug fixes\n\n- Tracing: policy cannot fail the pipeline, even in the worst condition #7252\n- Tracing: policy pass correctly status message if exception #7252\n- Tracing: incorrect span if exception raised from decorated function #7133\n- Fixed urllib3 ConnectTimeoutError being raised by Requests during a socket timeout. Now this exception is caught and wrapped as a `ServiceRequestError` #7542\n\n### Breaking changes\n\n- Tracing: `azure.core.tracing.context` removed\n- Tracing: `azure.core.tracing.context.tracing_context.with_current_context` renamed to `azure.core.tracing.common.with_current_context` #7252\n- Tracing: `link` renamed `link_from_headers` and `link` takes now a string\n- Tracing: opencensus implementation has been moved to the package `azure-core-tracing-opencensus`\n- Some modules and classes that were importables from several different places have been removed:\n\n - `azure.core.HttpResponseError` is now only `azure.core.exceptions.HttpResponseError`\n - `azure.core.Configuration` is now only `azure.core.configuration.Configuration`\n - `azure.core.HttpRequest` is now only `azure.core.pipeline.transport.HttpRequest`\n - `azure.core.version` module has been removed. Use `azure.core.__version__` to get version number.\n - `azure.core.pipeline_client` has been removed. Import from `azure.core` instead.\n - `azure.core.pipeline_client_async` has been removed. Import from `azure.core` instead.\n - `azure.core.pipeline.base` has been removed. Import from `azure.core.pipeline` instead.\n - `azure.core.pipeline.base_async` has been removed. Import from `azure.core.pipeline` instead.\n - `azure.core.pipeline.policies.base` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.base_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.authentication` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.authentication_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.custom_hook` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.redirect` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.redirect_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.retry` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.retry_async` has been removed. 
Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.distributed_tracing` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.universal` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.tracing.abstract_span` has been removed. Import from `azure.core.tracing` instead.\n - `azure.core.pipeline.transport.base` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.base_async` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_basic` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_asyncio` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_trio` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.aiohttp` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.polling.poller` has been removed. Import from `azure.core.polling` instead.\n - `azure.core.polling.async_poller` has been removed. Import from `azure.core.polling` instead.\n\n## 1.0.0b3 (2019-09-09)\n\n### Bug fixes\n\n- Fix aiohttp auto-headers #6992\n- Add tracing to policies module init #6951\n\n## 1.0.0b2 (2019-08-05)\n\n### Breaking changes\n\n- Transport classes don't take `config` parameter anymore (use kwargs instead) #6372\n- `azure.core.paging` has been completely refactored #6420\n- HttpResponse.content_type attribute is now a string (was a list) #6490\n- For `StreamDownloadGenerator` subclasses, `response` is now an `HttpResponse`, and not a transport response like `aiohttp.ClientResponse` or `requests.Response`. 
The transport response is available in `internal_response` attribute #6490\n\n### Bug fixes\n\n- aiohttp is not required to import async pipelines classes #6496\n- `AsyncioRequestsTransport.sleep` is now a coroutine as expected #6490\n- `RequestsTransport` is not tight to `ProxyPolicy` implementation details anymore #6372\n- `AiohttpTransport` does not raise on unexpected kwargs #6355\n\n### Features\n\n- New paging base classes that support `continuation_token` and `by_page()` #6420\n- Proxy support for `AiohttpTransport` #6372\n\n## 1.0.0b1 (2019-06-26)\n\n- Preview 1 release", - "release_date": "2025-07-03T00:55:25", + "description": "Microsoft Azure Core Library for Python\n# Azure Core shared client library for Python\n\nAzure core provides shared exceptions and modules for Python SDK client libraries.\nThese libraries follow the [Azure SDK Design Guidelines for Python](https://azure.github.io/azure-sdk/python/guidelines/index.html) .\n\nIf you are a client library developer, please reference [client library developer reference](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md) for more information.\n\n[Source code](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/)\n| [Package (Pypi)][package]\n| [Package (Conda)](https://anaconda.org/microsoft/azure-core/)\n| [API reference documentation](https://learn.microsoft.com/python/api/overview/azure/core-readme)\n\n## Getting started\n\nTypically, you will not need to install azure core;\nit will be installed when you install one of the client libraries using it.\nIn case you want to install it explicitly (to implement your own client library, for example),\nyou can find it [here](https://pypi.org/project/azure-core/).\n\n## Key concepts\n\n### Azure Core Library Exceptions\n\n#### AzureError\n\nAzureError is the base exception for all errors.\n\n```python\nclass AzureError(Exception):\n def __init__(self, message, *args, **kwargs):\n self.inner_exception = kwargs.get(\"error\")\n self.exc_type, self.exc_value, self.exc_traceback = sys.exc_info()\n self.exc_type = self.exc_type.__name__ if self.exc_type else type(self.inner_exception)\n self.exc_msg = \"{}, {}: {}\".format(message, self.exc_type, self.exc_value) # type: ignore\n self.message = str(message)\n self.continuation_token = kwargs.get(\"continuation_token\")\n super(AzureError, self).__init__(self.message, *args)\n```\n\n*message* is any message (str) to be associated with the exception.\n\n*args* are any additional args to be included with exception.\n\n*kwargs* are keyword arguments to include with the exception. Use the keyword *error* to pass in an internal exception and *continuation_token* for a token reference to continue an incomplete operation.\n\n**The following exceptions inherit from AzureError:**\n\n#### ServiceRequestError\n\nAn error occurred while attempt to make a request to the service. No request was sent.\n\n#### ServiceResponseError\n\nThe request was sent, but the client failed to understand the response.\nThe connection may have timed out. 
These errors can be retried for idempotent or safe operations.\n\n#### HttpResponseError\n\nA request was made, and a non-success status code was received from the service.\n\n```python\nclass HttpResponseError(AzureError):\n def __init__(self, message=None, response=None, **kwargs):\n self.reason = None\n self.response = response\n if response:\n self.reason = response.reason\n self.status_code = response.status_code\n self.error = self._parse_odata_body(ODataV4Format, response) # type: Optional[ODataV4Format]\n if self.error:\n message = str(self.error)\n else:\n message = message or \"Operation returned an invalid status '{}'\".format(\n self.reason\n )\n\n super(HttpResponseError, self).__init__(message=message, **kwargs)\n```\n\n*message* is the HTTP response error message (optional)\n\n*response* is the HTTP response (optional).\n\n*kwargs* are keyword arguments to include with the exception.\n\n**The following exceptions inherit from HttpResponseError:**\n\n#### DecodeError\n\nAn error raised during response de-serialization.\n\n#### IncompleteReadError\n\nAn error raised if peer closes the connection before we have received the complete message body.\n\n#### ResourceExistsError\n\nAn error response with status code 4xx. This will not be raised directly by the Azure core pipeline.\n\n#### ResourceNotFoundError\n\nAn error response, typically triggered by a 412 response (for update) or 404 (for get/post).\n\n#### ResourceModifiedError\n\nAn error response with status code 4xx, typically 412 Conflict. This will not be raised directly by the Azure core pipeline.\n\n#### ResourceNotModifiedError\n\nAn error response with status code 304. This will not be raised directly by the Azure core pipeline.\n\n#### ClientAuthenticationError\n\nAn error response with status code 4xx. This will not be raised directly by the Azure core pipeline.\n\n#### TooManyRedirectsError\n\nAn error raised when the maximum number of redirect attempts is reached. The maximum amount of redirects can be configured in the RedirectPolicy.\n\n```python\nclass TooManyRedirectsError(HttpResponseError):\n def __init__(self, history, *args, **kwargs):\n self.history = history\n message = \"Reached maximum redirect attempts.\"\n super(TooManyRedirectsError, self).__init__(message, *args, **kwargs)\n```\n\n*history* is used to document the requests/responses that resulted in redirected requests.\n\n*args* are any additional args to be included with exception.\n\n*kwargs* are keyword arguments to include with the exception.\n\n#### StreamConsumedError\n\nAn error thrown if you try to access the stream of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once\nthe response stream has been consumed.\n\n#### StreamClosedError\n\nAn error thrown if you try to access the stream of the `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once\nthe response stream has been closed.\n\n#### ResponseNotReadError\n\nAn error thrown if you try to access the `content` of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` before\nreading in the response's bytes first.\n\n### Configurations\n\nWhen calling the methods, some properties can be configured by passing in as kwargs arguments.\n\n| Parameters | Description |\n| --- | --- |\n| headers | The HTTP Request headers. |\n| request_id | The request id to be added into header. |\n| user_agent | If specified, this will be added in front of the user agent string. |\n| logging_enable| Use to enable per operation. Defaults to `False`. 
|\n| logger | If specified, it will be used to log information. |\n| response_encoding | The encoding to use if known for this service (will disable auto-detection). |\n| raw_request_hook | Callback function. Will be invoked on request. |\n| raw_response_hook | Callback function. Will be invoked on response. |\n| network_span_namer | A callable to customize the span name. |\n| tracing_attributes | Attributes to set on all created spans. |\n| permit_redirects | Whether the client allows redirects. Defaults to `True`. |\n| redirect_max | The maximum allowed redirects. Defaults to `30`. |\n| retry_total | Total number of retries to allow. Takes precedence over other counts. Default value is `10`. |\n| retry_connect | How many connection-related errors to retry on. These are errors raised before the request is sent to the remote server, which we assume has not triggered the server to process the request. Default value is `3`. |\n| retry_read | How many times to retry on read errors. These errors are raised after the request was sent to the server, so the request may have side-effects. Default value is `3`. |\n| retry_status | How many times to retry on bad status codes. Default value is `3`. |\n| retry_backoff_factor | A backoff factor to apply between attempts after the second try (most errors are resolved immediately by a second try without a delay). Retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))` seconds. If the backoff_factor is 0.1, then the retry will sleep for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is `0.8`. |\n| retry_backoff_max | The maximum back off time. Default value is `120` seconds (2 minutes). |\n| retry_mode | Fixed or exponential delay between attempts, default is `Exponential`. |\n| timeout | Timeout setting for the operation in seconds, default is `604800`s (7 days). |\n| connection_timeout | A single float in seconds for the connection timeout. Defaults to `300` seconds. |\n| read_timeout | A single float in seconds for the read timeout. Defaults to `300` seconds. |\n| connection_verify | SSL certificate verification. Enabled by default. Set to False to disable, alternatively can be set to the path to a CA_BUNDLE file or directory with certificates of trusted CAs. |\n| connection_cert | Client-side certificates. You can specify a local cert to use as client side certificate, as a single file (containing the private key and the certificate) or as a tuple of both files' paths. |\n| proxies | Dictionary mapping protocol or protocol and hostname to the URL of the proxy. |\n| cookies | Dict or CookieJar object to send with the `Request`. |\n| connection_data_block_size | The block size of data sent over the connection. Defaults to `4096` bytes. |\n\n### Async transport\n\nThe async transport is designed to be opt-in. [AioHttp](https://pypi.org/project/aiohttp/) is one of the supported implementations of async transport. It is not installed by default. You need to install it separately.\n\n### Shared modules\n\n#### MatchConditions\n\nMatchConditions is an enum to describe match conditions.\n\n```python\nclass MatchConditions(Enum):\n Unconditionally = 1 # Matches any condition\n IfNotModified = 2 # If the target object is not modified. Usually it maps to etag=\n IfModified = 3 # Only if the target object is modified. Usually it maps to etag!=\n IfPresent = 4 # If the target object exists. Usually it maps to etag='*'\n IfMissing = 5 # If the target object does not exist. 
Usually it maps to etag!='*'\n```\n\n#### CaseInsensitiveEnumMeta\n\nA metaclass to support case-insensitive enums.\n\n```python\nfrom enum import Enum\n\nfrom azure.core import CaseInsensitiveEnumMeta\n\nclass MyCustomEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):\n FOO = 'foo'\n BAR = 'bar'\n```\n\n#### Null Sentinel Value\n\nA falsy sentinel object which is supposed to be used to specify attributes\nwith no data. This gets serialized to `null` on the wire.\n\n```python\nfrom azure.core.serialization import NULL\n\nassert bool(NULL) is False\n\nfoo = Foo(\n attr=NULL\n)\n```\n\n## Logging\n\nAzure libraries follow the guidance of Python's standard [logging](https://docs.python.org/3/library/logging.html) module. By following the Python documentation on logging, you should be able to configure logging for Azure libraries effectively.\n\nAzure library loggers use a dot-based separated syntax, where the first section is always `azure`, followed by the package name. For example, the Azure Core library uses logger names that start with `azure.core`.\n\nHere's an example of how to configure logging for Azure libraries:\n\n```python\nimport logging\nimport sys\n\n# Enable detailed console logs across Azure libraries\nazure_logger = logging.getLogger(\"azure\")\nazure_logger.setLevel(logging.DEBUG)\nazure_logger.addHandler(logging.StreamHandler(stream=sys.stdout))\n\n# Exclude detailed logs for network calls associated with getting Entra ID token.\nidentity_logger = logging.getLogger(\"azure.identity\")\nidentity_logger.setLevel(logging.ERROR)\n\n# Make sure regular (redacted) detailed azure.core logs are not shown, as we are about to\n# turn on non-redacted logs by passing 'logging_enable=True' to the client constructor \nlogger = logging.getLogger(\"azure.core.pipeline.policies.http_logging_policy\")\nlogger.setLevel(logging.ERROR)\n```\n\n## Contributing\n\nThis project welcomes contributions and suggestions. Most contributions require\nyou to agree to a Contributor License Agreement (CLA) declaring that you have\nthe right to, and actually do, grant us the rights to use your contribution.\nFor details, visit [https://cla.microsoft.com](https://cla.microsoft.com).\n\nWhen you submit a pull request, a CLA-bot will automatically determine whether\nyou need to provide a CLA and decorate the PR appropriately (e.g., label,\ncomment). Simply follow the instructions provided by the bot. You will only\nneed to do this once across all repos using our CLA.\n\nThis project has adopted the\n[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).\nFor more information, see the\n[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)\nor contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any\nadditional questions or comments.\n\n\n[package]: https://pypi.org/project/azure-core/\n\n\n# Release History\n\n## 1.35.1 (2025-09-11)\n\n### Bugs Fixed\n\n- Fixed an issue where the `retry_backoff_max` parameter in `RetryPolicy` and `AsyncRetryPolicy` constructors was being ignored, causing retry operations to use default maximum backoff values instead of the user-specified limits. #42444\n\n### Other Changes\n\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` will now properly surface credential exceptions when handling claims challenges. 
Previously, exceptions from credential token requests were suppressed; now they are raised and chained with the original 401 `HttpResponseError` response for better debugging visibility. #42536\n\n## 1.35.0 (2025-07-02)\n\n### Features Added\n\n- Added a `start_time` keyword argument to the `start_span` and `start_as_current_span` methods in the `OpenTelemetryTracer` class. This allows users to specify a custom start time for created spans. #41106\n- Added a `context` keyword argument to the `start_span` and `start_as_current_span` methods in the `OpenTelemetryTracer` class. This allows users to specify a custom parent context for created spans. #41511\n- Added method `as_attribute_dict` to `azure.core.serialization` for backcompat migration purposes. Will return a generated model as a dictionary where the keys are in attribute syntax.\n- Added `is_generated_model` method to `azure.core.serialization`. Returns whether a given input is a model from one of our generated sdks. #41445\n- Added `attribute_list` method to `azure.core.serialization`. Returns all of the attributes of a given model from one of our generated sdks. #41571\n\n### Other Changes\n\n- A timeout error when using the `aiohttp` transport (the default for async SDKs) will now be raised as a `azure.core.exceptions.ServiceResponseTimeoutError`, a subtype of the previously raised `ServiceResponseError`.\n- When using with `aiohttp` 3.10 or later, a connection timeout error will now be raised as a `azure.core.exceptions.ServiceRequestTimeoutError`, which can be retried.\n- The default implementation of `on_challenge` in `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` will now cache the retrieved token. #41857\n\n## 1.34.0 (2025-05-01)\n\n### Features Added\n\n- Added a `set_span_error_status` method to the `OpenTelemetryTracer` class. This method allows users to set the status of a span to `ERROR` after it has been created. #40703\n\n### Other Changes\n\n- Python 3.8 is no longer supported. Please use Python version 3.9 or later.\n\n## 1.33.0 (2025-04-03)\n\n### Features Added\n\n- Added native OpenTelemetry tracing to Azure Core which enables users to use OpenTelemetry to trace Azure SDK operations without needing to install a plugin. #39563\n - To enable native OpenTelemetry tracing, users need to:\n 1. Have `opentelemetry-api` installed.\n 2. Ensure that `settings.tracing_implementation` is not set.\n 3. Ensure that `settings.tracing_enabled` is set to `True`.\n - If `setting.tracing_implementation` is set, the tracing plugin will be used instead of the native tracing.\n - If `settings.tracing_enabled` is set to `False`, tracing will be disabled.\n - The `OpenTelemetryTracer` class was added to the `azure.core.tracing.opentelemetry` module. This is a wrapper around the OpenTelemetry tracer that is used to create spans for Azure SDK operations.\n - Added a `get_tracer` method to the new `azure.core.instrumentation` module. This method returns an instance of the `OpenTelemetryTracer` class if OpenTelemetry is available.\n - A `TracingOptions` TypedDict class was added to define the options that SDK users can use to configure tracing per-operation. 
These options include the ability to enable or disable tracing and set additional attributes on spans.\n - Example usage: `client.method(tracing_options={\"enabled\": True, \"attributes\": {\"foo\": \"bar\"}})`\n - The `DistributedTracingPolicy` and `distributed_trace`/`distributed_trace_async` decorators now uses the OpenTelemetry tracer if it is available and native tracing is enabled.\n - SDK clients can define an `_instrumentation_config` class variable to configure the OpenTelemetry tracer used in method span creation. Possible configuration options are `library_name`, `library_version`, `schema_url`, and `attributes`.\n - `DistributedTracingPolicy` now accepts a `instrumentation_config` keyword argument to configure the OpenTelemetry tracer used in HTTP span creation.\n\n### Breaking Changes\n\n- Removed automatic tracing enablement for the OpenTelemetry plugin if `opentelemetry` was imported. To enable tracing with the plugin, please import `azure.core.settings.settings` and set `settings.tracing_implementation` to `\"opentelemetry\"`. #39563\n- In `DistributedTracingPolicy`, the default span name is now just the HTTP method (e.g., \"GET\", \"POST\") and no longer includes the URL path. This change was made to converge with the OpenTelemetry HTTP semantic conventions. The full URL is still included in the span attributes.\n- Renamed span attributes in `DistributedTracingPolicy`:\n - \"x-ms-client-request-id\" is now \"az.client_request_id\"\n - \"x-ms-request-id\" is now \"az.service_request_id\"\n\n### Bugs Fixed\n\n- Fixed an issue where the `traceparent` header was not being set correctly in the `DistributedTracingPolicy`. The `traceparent` header will now set based on the context of the HTTP client span. #40074\n\n### Other Changes\n\n- Added `opentelemetry-api` as an optional dependency for tracing. This can be installed with `pip install azure-core[tracing]`. #39563\n\n## 1.32.0 (2024-10-31)\n\n### Features Added\n\n- Added a default implementation to handle token challenges in `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy`.\n\n### Bugs Fixed\n\n- Fixed an issue where the `tracing_attributes` keyword argument wasn't being handled at the request/method level. #38164\n\n### Other Changes\n\n- Log \"x-vss-e2eid\" and \"x-msedge-ref\" headers in `HttpLoggingPolicy`.\n\n## 1.31.0 (2024-09-12)\n\n### Features Added\n\n- Added azure.core.AzureClouds enum to represent the different Azure clouds.\n- Added two new credential protocol classes, `SupportsTokenInfo` and `AsyncSupportsTokenInfo`, to offer more extensibility in supporting various token acquisition scenarios. #36565\n - Each new protocol class defines a `get_token_info` method that returns an `AccessTokenInfo` object.\n- Added a new `TokenRequestOptions` class, which is a `TypedDict` with optional parameters, that can be used to define options for token requests through the `get_token_info` method. #36565\n- Added a new `AccessTokenInfo` class, which is returned by `get_token_info` implementations. This class contains the token, its expiration time, and optional additional information like when a token should be refreshed. #36565\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` now first check if a credential has the `get_token_info` method defined. If so, the `get_token_info` method is used to acquire a token. Otherwise, the `get_token` method is used. 
#36565\n - These policies now also check the `refresh_on` attribute when determining if a new token request should be made.\n\n### Other Changes\n\n- The Azure Core OpenTelemetry tracing plugin will now be the preferred tracing plugin over the OpenCensus plugin. If both plugins are installed and `opentelemetry` is imported, then OpenTelemetry will be used to trace Azure SDK operations. #35050\n\n## 1.30.2 (2024-06-06)\n\n### Features Added\n\n- Tracing: `DistributedTracingPolicy` will now set an attribute, `http.request.resend_count`, on HTTP spans for resent requests to indicate the resend attempt number. #35069\n\n### Bugs Fixed\n\n- Raise correct exception if transport is used while already closed #35559\n\n### Other Changes\n\n- HTTP tracing spans will now include an `error.type` attribute if an error status code is returned. #34619\n- Minimum required Python version is now 3.8\n\n## 1.30.1 (2024-02-29)\n\n### Other Changes\n\n- Accept float for `retry_after` header. #34203\n\n## 1.30.0 (2024-02-01)\n\n### Features Added\n\n- Support tuple input for file values to `azure.core.rest.HttpRequest` #33948\n- Support tuple input to `files` with duplicate field names `azure.core.rest.HttpRequest` #34021\n\n## 1.29.7 (2024-01-18)\n\n### Other Changes\n\n- Removed dependency on `anyio`. #33282\n\n## 1.29.6 (2023-12-14)\n\n### Bugs Fixed\n\n- Adjusted `AsyncBearerTokenCredentialPolicy` to work properly with `trio` concurrency mechanisms. ([#33307](https://github.com/Azure/azure-sdk-for-python/pull/33307))\n\n### Other Changes\n\n- Added dependency on `anyio` >=3.0,<5.0\n- Bumped minimum dependency on `requests` to 2.21.0.\n\n## 1.29.5 (2023-10-19)\n\n### Bugs Fixed\n\n- Fixed an issue with `multipart/form-data` in the async transport where `data` was not getting encoded into the request body. #32473\n\n### Other Changes\n\n- Use ssl context from aiohttp by default.\n\n## 1.29.4 (2023-09-07)\n\n### Bugs Fixed\n\n- Fixed the issue that some urls trigger an infinite loop. #31346\n- Fixed issue where IndexError was raised if multipart responses did not match the number of requests. #31471\n- Fixed issue unbound variable exception if dict is invalid in CloudEvent.from_dict. #31835\n- Fixed issue asyncBearerTokenCredentialPolicy is not backward compatible with SansIOHTTPPolicy. #31836\n- Fixed issue mypy complains with new version of azure-core. #31564\n\n## 1.29.3 (2023-08-22)\n\n### Bugs Fixed\n\n- Typing fix: `message` cannot be `None` in `AzureError`. #31564\n\n## 1.29.2 (2023-08-14)\n\n### Bugs Fixed\n\n- Added a default implementation for `AsyncTokenCredential.__aexit__()` #31573\n\n### Other Changes\n\n- Bumped `typing-extensions` version to 4.6.0.\n\n## 1.29.1 (2023-08-09)\n\n### Bugs Fixed\n\n- Not pass `enabled_cae` unless it is explicitly enabled.\n\n## 1.29.0 (2023-08-03)\n\n### Features Added\n\n- A keyword argument `enable_cae` was added to the `get_token` method of the `TokenCredential` protocol. #31012\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` now accept `enable_cae` keyword arguments in their constructors. This is used in determining if [Continuous Access Evaluation (CAE)](https://learn.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation) should be enabled for each `get_token` request. #31012\n\n## 1.28.0 (2023-07-06)\n\n### Features Added\n\n- Added header name parameter to `RequestIdPolicy`. 
#30772\n- Added `SensitiveHeaderCleanupPolicy` that cleans up sensitive headers if a redirect happens and the new destination is in another domain. #28349\n\n### Other Changes\n\n- Catch aiohttp errors and translate them into azure-core errors.\n\n## 1.27.1 (2023-06-13)\n\n### Bugs Fixed\n\n- Fix url building for some complex query parameters scenarios #30707\n\n## 1.27.0 (2023-06-01)\n\n### Features Added\n\n- Added support to use sync credentials in `AsyncBearerTokenCredentialPolicy`. #30381\n- Added \"prefix\" parameter to AzureKeyCredentialPolicy #29901\n\n### Bugs Fixed\n\n- Improve error message when providing the wrong credential type for AzureKeyCredential #30380\n\n## 1.26.4 (2023-04-06)\n\n### Features Added\n\n- Updated settings to include OpenTelemetry as a tracer provider. #29095\n\n### Other Changes\n\n- Improved typing\n\n## 1.26.3 (2023-02-02)\n\n### Bugs Fixed\n\n- Fixed deflate decompression for aiohttp #28483\n\n## 1.26.2 (2023-01-05)\n\n### Bugs Fixed\n\n- Fix 'ClientSession' object has no attribute 'auto_decompress' (thanks to @mghextreme for the contribution)\n\n### Other Changes\n\n- Add \"x-ms-error-code\" as secure header to log\n- Rename \"DEFAULT_HEADERS_WHITELIST\" to \"DEFAULT_HEADERS_ALLOWLIST\". Added a backward compatible alias.\n\n## 1.26.1 (2022-11-03)\n\n### Other Changes\n\n- Added example of RequestsTransport with custom session. (thanks to @inirudebwoy for the contribution) #26768\n- Added Python 3.11 support.\n\n## 1.26.0 (2022-10-06)\n\n### Other Changes\n\n- LRO polling will not wait anymore before doing the first status check #26376\n- Added extra dependency for [aio]. pip install azure-core[aio] installs aiohttp too.\n\n## 1.25.1 (2022-09-01)\n\n### Bugs Fixed\n\n- Added @runtime_checkable to `TokenCredential` protocol definitions #25187\n\n## 1.25.0 (2022-08-04)\n\nAzure-core is supported on Python 3.7 or later. For more details, please read our page on [Azure SDK for Python version support policy](https://github.com/Azure/azure-sdk-for-python/wiki/Azure-SDKs-Python-version-support-policy).\n\n### Features Added\n\n- Added `CaseInsensitiveDict` implementation in `azure.core.utils` removing dependency on `requests` and `aiohttp`\n\n## 1.24.2 (2022-06-30)\n\n### Bugs Fixed\n\n- Fixed the bug that azure-core could not be imported under Python 3.11.0b3 #24928\n- `ContentDecodePolicy` can now correctly deserialize more JSON bodies with different mime types #22410\n\n## 1.24.1 (2022-06-01)\n\n### Bugs Fixed\n\n- Declare method level span as INTERNAL by default #24492\n- Fixed type hints for `azure.core.paging.ItemPaged` #24548\n\n## 1.24.0 (2022-05-06)\n\n### Features Added\n\n- Add `SerializationError` and `DeserializationError` in `azure.core.exceptions` for errors raised during serialization / deserialization #24312\n\n## 1.23.1 (2022-03-31)\n\n### Bugs Fixed\n\n- Allow stream inputs to the `content` kwarg of `azure.core.rest.HttpRequest` from objects with a `read` method #23578\n\n## 1.23.0 (2022-03-03)\n\n### Features Added\n\n- Improve intellisense type hinting for service client methods. #22891\n\n- Add a case insensitive dict `case_insensitive_dict` in `azure.core.utils`. #23206\n\n### Bugs Fixed\n\n- Use \"\\n\" rather than \"/n\" for new line in log. 
#23261\n\n### Other Changes\n\n- Log \"WWW-Authenticate\" header in `HttpLoggingPolicy` #22990\n- Added dependency on `typing-extensions` >= 4.0.1\n\n## 1.22.1 (2022-02-09)\n\n### Bugs Fixed\n\n- Limiting `final-state-via` scope to POST until consuming SDKs has been fixed to use this option properly on PUT. #22989\n\n## 1.22.0 (2022-02-03)\n_[**This version is deprecated.**]_\n\n### Features Added\n\n- Add support for `final-state-via` LRO option in core. #22713\n\n### Bugs Fixed\n\n- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #22302\n- Raise `AttributeError` when calling azure.core.pipeline.transport.\\_\\_bases__ #22469\n\n### Other Changes\n\n- Python 2.7 is no longer supported. Please use Python version 3.6 or later.\n\n## 1.21.1 (2021-12-06)\n\n### Other Changes\n\n- Revert change in str method #22023\n\n## 1.21.0 (2021-12-02)\n\n### Breaking Changes\n\n- Sync stream downloading now raises `azure.core.exceptions.DecodeError` rather than `requests.exceptions.ContentDecodingError`\n\n### Bugs Fixed\n\n- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #21800\n\n## 1.20.1 (2021-11-08)\n\n### Bugs Fixed\n\n- Correctly set response's content to decompressed body when users are using aiohttp transport with decompression headers #21620\n\n## 1.20.0 (2021-11-04)\n\n### Features Added\n\n- GA `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in\nrequests and sends them through our pipelines.\n- GA `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.\n- GA errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. 
These errors\nare thrown if you mishandle streamed responses from the `azure.core.rest` module\n- add kwargs to the methods for `iter_raw` and `iter_bytes` #21529\n- no longer raise JSON errors if users pass in file descriptors of JSON to the `json` kwarg in `HttpRequest` #21504\n- Added new error type `IncompleteReadError` which is raised if peer closes the connection before we have received the complete message body.\n\n### Breaking Changes\n\n- SansIOHTTPPolicy.on_exception returns None instead of bool.\n\n### Bugs Fixed\n\n- The `Content-Length` header in a http response is strictly checked against the actual number of bytes in the body,\n rather than silently truncating data in case the underlying tcp connection is closed prematurely.\n (thanks to @jochen-ott-by for the contribution) #20412\n- UnboundLocalError when SansIOHTTPPolicy handles an exception #15222\n- Add default content type header of `text/plain` and content length header for users who pass unicode strings to the `content` kwarg of `HttpRequest` in 2.7 #21550\n\n## 1.19.1 (2021-11-01)\n\n### Bugs Fixed\n\n- respect text encoding specified in argument (thanks to @ryohji for the contribution) #20796\n- Fix \"coroutine x.read() was never awaited\" warning from `ContentDecodePolicy` #21318\n- fix type check for `data` input to `azure.core.rest` for python 2.7 users #21341\n- use `charset_normalizer` if `chardet` is not installed to migrate aiohttp 3.8.0 changes.\n\n### Other Changes\n\n- Refactor AzureJSONEncoder (thanks to @Codejune for the contribution) #21028\n\n## 1.19.0 (2021-09-30)\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` are now abstract base classes. They should not be initialized directly, instead\nyour transport responses should inherit from them and implement them.\n- The properties of the `azure.core.rest` responses are now all read-only\n\n- HttpLoggingPolicy integrates logs into one record #19925\n\n## 1.18.0 (2021-09-02)\n\n### Features Added\n\n- `azure.core.serialization.AzureJSONEncoder` (introduced in 1.17.0) serializes `datetime.datetime` objects in ISO 8601 format, conforming to RFC 3339's specification. #20190\n- We now use `azure.core.serialization.AzureJSONEncoder` to serialize `json` input to `azure.core.rest.HttpRequest`.\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- The `text` property on `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` has changed to a method, which also takes\nan `encoding` parameter.\n- Removed `iter_text` and `iter_lines` from `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse`\n\n### Bugs Fixed\n\n- The behaviour of the headers returned in `azure.core.rest` responses now aligns across sync and async. Items can now be checked case-insensitively and without raising an error for format.\n\n## 1.17.0 (2021-08-05)\n\n### Features Added\n\n- Cut hard dependency on requests library\n- Added a `from_json` method which now accepts storage QueueMessage, eventhub's EventData or ServiceBusMessage or simply json bytes to return a `CloudEvent`\n\n### Fixed\n\n- Not override \"x-ms-client-request-id\" if it already exists in the header. #17757\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- `azure.core.rest` will not try to guess the `charset` anymore if it was impossible to extract it from `HttpResponse` analysis. 
This removes our dependency on `charset`.\n\n## 1.16.0 (2021-07-01)\n\n### Features Added\n\n- Add new ***provisional*** methods `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in\nrequests and sends them through our pipelines.\n- Add new ***provisional*** module `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.\n- Add new ***provisional*** errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. These errors\nare thrown if you mishandle streamed responses from the provisional `azure.core.rest` module\n\n### Fixed\n\n- Improved error message in the `from_dict` method of `CloudEvent` when a wrong schema is sent.\n\n## 1.15.0 (2021-06-04)\n\n### New Features\n\n- Added `BearerTokenCredentialPolicy.on_challenge` and `.authorize_request` to allow subclasses to optionally handle authentication challenges\n\n### Bug Fixes\n\n- Retry policies don't sleep after operations time out\n- The `from_dict` methhod in the `CloudEvent` can now convert a datetime string to datetime object when microsecond exceeds the python limitation\n\n## 1.14.0 (2021-05-13)\n\n### New Features\n\n- Added `azure.core.credentials.AzureNamedKeyCredential` credential #17548.\n- Added `decompress` parameter for `stream_download` method. If it is set to `False`, will not do decompression upon the stream. #17920\n\n## 1.13.0 (2021-04-02)\n\nAzure core requires Python 2.7 or Python 3.6+ since this release.\n\n### New Features\n\n- Added `azure.core.utils.parse_connection_string` function to parse connection strings across SDKs, with common validation and support for case insensitive keys.\n- Supported adding custom policies #16519\n- Added `~azure.core.tracing.Link` that should be used while passing `Links` to `AbstractSpan`.\n- `AbstractSpan` constructor can now take in additional keyword only args.\n\n### Bug fixes\n\n- Make NetworkTraceLoggingPolicy show the auth token in plain text. #14191\n- Fixed RetryPolicy overriding default connection timeout with an extreme value #17481\n\n## 1.12.0 (2021-03-08)\n\nThis version will be the last version to officially support Python 3.5, future versions will require Python 2.7 or Python 3.6+.\n\n### Features\n\n- Added `azure.core.messaging.CloudEvent` model that follows the cloud event spec.\n- Added `azure.core.serialization.NULL` sentinel value\n- Improve `repr`s for `HttpRequest` and `HttpResponse`s #16972\n\n### Bug Fixes\n\n- Disable retry in stream downloading. (thanks to @jochen-ott-by @hoffmann for the contribution) #16723\n\n## 1.11.0 (2021-02-08)\n\n### Features\n\n- Added `CaseInsensitiveEnumMeta` class for case-insensitive enums. #16316\n- Add `raise_for_status` method onto `HttpResponse`. Calling `response.raise_for_status()` on a response with an error code\nwill raise an `HttpResponseError`. Calling it on a good response will do nothing #16399\n\n### Bug Fixes\n\n- Update conn.conn_kw rather than overriding it when setting block size. (thanks for @jiasli for the contribution) #16587\n\n## 1.10.0 (2021-01-11)\n\n### Features\n\n- Added `AzureSasCredential` and its respective policy. 
#15946\n\n## 1.9.0 (2020-11-09)\n\n### Features\n\n- Add a `continuation_token` attribute to the base `AzureError` exception, and set this value for errors raised\n during paged or long-running operations.\n\n### Bug Fixes\n\n- Set retry_interval to 1 second instead of 1000 seconds (thanks **vbarbaresi** for contributing) #14357\n\n\n## 1.8.2 (2020-10-05)\n\n### Bug Fixes\n\n- Fixed bug to allow polling in the case of parameterized endpoints with relative polling urls #14097\n\n\n## 1.8.1 (2020-09-08)\n\n### Bug fixes\n\n- SAS credential replicated \"/\" fix #13159\n\n## 1.8.0 (2020-08-10)\n\n### Features\n\n- Support params as list for exploding parameters #12410\n\n\n## 1.7.0 (2020-07-06)\n\n### Bug fixes\n\n- `AzureKeyCredentialPolicy` will now accept (and ignore) passed in kwargs #11963\n- Better error messages if passed endpoint is incorrect #12106\n- Do not JSON encore a string if content type is \"text\" #12137\n\n### Features\n\n- Added `http_logging_policy` property on the `Configuration` object, allowing users to individually\nset the http logging policy of the config #12218\n\n## 1.6.0 (2020-06-03)\n\n### Bug fixes\n\n- Fixed deadlocks in AsyncBearerTokenCredentialPolicy #11543\n- Fix AttributeException in StreamDownloadGenerator #11462\n\n### Features\n\n- Added support for changesets as part of multipart message support #10485\n- Add AsyncLROPoller in azure.core.polling #10801\n- Add get_continuation_token/from_continuation_token/polling_method methods in pollers (sync and async) #10801\n- HttpResponse and PipelineContext objects are now pickable #10801\n\n## 1.5.0 (2020-05-04)\n\n### Features\n\n- Support \"x-ms-retry-after-ms\" in response header #10743\n- `link` and `link_from_headers` now accepts attributes #10765\n\n### Bug fixes\n\n- Not retry if the status code is less than 400 #10778\n- \"x-ms-request-id\" is not considered safe header for logging #10967\n\n## 1.4.0 (2020-04-06)\n\n### Features\n\n- Support a default error type in map_error #9773\n- Added `AzureKeyCredential` and its respective policy. 
#10509\n- Added `azure.core.polling.base_polling` module with a \"Microsoft One API\" polling implementation #10090\n Also contains the async version in `azure.core.polling.async_base_polling`\n- Support kwarg `enforce_https` to disable HTTPS check on authentication #9821\n- Support additional kwargs in `HttpRequest.set_multipart_mixed` that will be passed into pipeline context.\n\n## 1.3.0 (2020-03-09)\n\n### Bug fixes\n\n- Appended RequestIdPolicy to the default pipeline #9841\n- Rewind the body position in async_retry #10117\n\n### Features\n\n- Add raw_request_hook support in custom_hook_policy #9958\n- Add timeout support in retry_policy #10011\n- Add OdataV4 error format auto-parsing in all exceptions ('error' attribute) #9738\n\n## 1.2.2 (2020-02-10)\n\n### Bug fixes\n\n- Fixed a bug that sends None as request_id #9545\n- Enable mypy for customers #9572\n- Handle TypeError in deep copy #9620\n- Fix text/plain content-type in decoder #9589\n\n## 1.2.1 (2020-01-14)\n\n### Bug fixes\n\n- Fixed a regression in 1.2.0 that was incompatible with azure-keyvault-* 4.0.0\n[#9462](https://github.com/Azure/azure-sdk-for-python/issues/9462)\n\n\n## 1.2.0 (2020-01-14)\n\n### Features\n\n- Add user_agent & sdk_moniker kwargs in UserAgentPolicy init #9355\n- Support OPTIONS HTTP verb #9322\n- Add tracing_attributes to tracing decorator #9297\n- Support auto_request_id in RequestIdPolicy #9163\n- Support fixed retry #6419\n- Support \"retry-after-ms\" in response header #9240\n\n### Bug fixes\n\n- Removed `__enter__` and `__exit__` from async context managers #9313\n\n## 1.1.1 (2019-12-03)\n\n### Bug fixes\n\n- Bearer token authorization requires HTTPS\n- Rewind the body position in retry #8307\n\n## 1.1.0 (2019-11-25)\n\n### Features\n\n- New RequestIdPolicy #8437\n- Enable logging policy in default pipeline #8053\n- Normalize transport timeout. #8000\n Now we have:\n * 'connection_timeout' - a single float in seconds for the connection timeout. Default 5min\n * 'read_timeout' - a single float in seconds for the read timeout. Default 5min\n\n### Bug fixes\n\n- RequestHistory: deepcopy fails if request contains a stream #7732\n- Retry: retry raises error if response does not have http_response #8629\n- Client kwargs are now passed to DistributedTracingPolicy correctly #8051\n- NetworkLoggingPolicy now logs correctly all requests in case of retry #8262\n\n## 1.0.0 (2019-10-29)\n\n### Features\n\n- Tracing: DistributedTracingPolicy now accepts kwargs network_span_namer to change network span name #7773\n- Tracing: Implementation of AbstractSpan can now use the mixin HttpSpanMixin to get HTTP span update automatically #7773\n- Tracing: AbstractSpan contract \"change_context\" introduced #7773\n- Introduce new policy HttpLoggingPolicy #7988\n\n### Bug fixes\n\n- Fix AsyncioRequestsTransport if input stream is an async generator #7743\n- Fix form-data with aiohttp transport #7749\n\n### Breaking changes\n\n- Tracing: AbstractSpan.set_current_span is longer supported. Use change_context instead. 
#7773\n- azure.core.pipeline.policies.ContentDecodePolicy.deserialize_from_text changed\n\n## 1.0.0b4 (2019-10-07)\n\n### Features\n\n- Tracing: network span context is available with the TRACING_CONTEXT in pipeline response #7252\n- Tracing: Span contract now has `kind`, `traceparent` and is a context manager #7252\n- SansIOHTTPPolicy methods can now be coroutines #7497\n- Add multipart/mixed support #7083:\n\n - HttpRequest now has a \"set_multipart_mixed\" method to set the parts of this request\n - HttpRequest now has a \"prepare_multipart_body\" method to build final body.\n - HttpResponse now has a \"parts\" method to return an iterator of parts\n - AsyncHttpResponse now has a \"parts\" methods to return an async iterator of parts\n - Note that multipart/mixed is a Python 3.x only feature\n\n### Bug fixes\n\n- Tracing: policy cannot fail the pipeline, even in the worst condition #7252\n- Tracing: policy pass correctly status message if exception #7252\n- Tracing: incorrect span if exception raised from decorated function #7133\n- Fixed urllib3 ConnectTimeoutError being raised by Requests during a socket timeout. Now this exception is caught and wrapped as a `ServiceRequestError` #7542\n\n### Breaking changes\n\n- Tracing: `azure.core.tracing.context` removed\n- Tracing: `azure.core.tracing.context.tracing_context.with_current_context` renamed to `azure.core.tracing.common.with_current_context` #7252\n- Tracing: `link` renamed `link_from_headers` and `link` takes now a string\n- Tracing: opencensus implementation has been moved to the package `azure-core-tracing-opencensus`\n- Some modules and classes that were importables from several different places have been removed:\n\n - `azure.core.HttpResponseError` is now only `azure.core.exceptions.HttpResponseError`\n - `azure.core.Configuration` is now only `azure.core.configuration.Configuration`\n - `azure.core.HttpRequest` is now only `azure.core.pipeline.transport.HttpRequest`\n - `azure.core.version` module has been removed. Use `azure.core.__version__` to get version number.\n - `azure.core.pipeline_client` has been removed. Import from `azure.core` instead.\n - `azure.core.pipeline_client_async` has been removed. Import from `azure.core` instead.\n - `azure.core.pipeline.base` has been removed. Import from `azure.core.pipeline` instead.\n - `azure.core.pipeline.base_async` has been removed. Import from `azure.core.pipeline` instead.\n - `azure.core.pipeline.policies.base` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.base_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.authentication` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.authentication_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.custom_hook` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.redirect` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.redirect_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.retry` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.retry_async` has been removed. 
Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.distributed_tracing` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.universal` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.tracing.abstract_span` has been removed. Import from `azure.core.tracing` instead.\n - `azure.core.pipeline.transport.base` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.base_async` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_basic` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_asyncio` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_trio` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.aiohttp` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.polling.poller` has been removed. Import from `azure.core.polling` instead.\n - `azure.core.polling.async_poller` has been removed. Import from `azure.core.polling` instead.\n\n## 1.0.0b3 (2019-09-09)\n\n### Bug fixes\n\n- Fix aiohttp auto-headers #6992\n- Add tracing to policies module init #6951\n\n## 1.0.0b2 (2019-08-05)\n\n### Breaking changes\n\n- Transport classes don't take `config` parameter anymore (use kwargs instead) #6372\n- `azure.core.paging` has been completely refactored #6420\n- HttpResponse.content_type attribute is now a string (was a list) #6490\n- For `StreamDownloadGenerator` subclasses, `response` is now an `HttpResponse`, and not a transport response like `aiohttp.ClientResponse` or `requests.Response`. 
The transport response is available in `internal_response` attribute #6490\n\n### Bug fixes\n\n- aiohttp is not required to import async pipelines classes #6496\n- `AsyncioRequestsTransport.sleep` is now a coroutine as expected #6490\n- `RequestsTransport` is not tight to `ProxyPolicy` implementation details anymore #6372\n- `AiohttpTransport` does not raise on unexpected kwargs #6355\n\n### Features\n\n- New paging base classes that support `continuation_token` and `by_page()` #6420\n- Proxy support for `AiohttpTransport` #6372\n\n## 1.0.0b1 (2019-06-26)\n\n- Preview 1 release", + "release_date": "2025-09-11T22:58:06", "parties": [ { "type": "person", @@ -155,11 +155,11 @@ "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/core/azure-core", - "download_url": "https://files.pythonhosted.org/packages/d4/78/bf94897361fdd650850f0f2e405b2293e2f12808239046232bdedf554301/azure_core-1.35.0-py3-none-any.whl", - "size": 210708, + "download_url": "https://files.pythonhosted.org/packages/27/52/805980aa1ba18282077c484dba634ef0ede1e84eec8be9c92b2e162d0ed6/azure_core-1.35.1-py3-none-any.whl", + "size": 211800, "sha1": null, - "md5": "a5bb28aab86f7accdd9c7c36533d6a2d", - "sha256": "8db78c72868a58f3de8991eb4d22c4d368fae226dac1002998d6c50437e7dad1", + "md5": "ece20e5c0b954f1f10defd30a0cc86a0", + "sha256": "12da0c9e08e48e198f9158b56ddbe33b421477e1dc98c2e1c8f9e254d92c468b", "sha512": null, "bug_tracking_url": null, "code_view_url": null, @@ -179,9 +179,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/azure-core/1.35.0/json", + "api_data_url": "https://pypi.org/pypi/azure-core/1.35.1/json", "datasource_id": null, - "purl": "pkg:pypi/azure-core@1.35.0" + "purl": "pkg:pypi/azure-core@1.35.1" }, { "type": "pypi", @@ -255,12 +255,12 @@ "type": "pypi", "namespace": null, "name": "azure-storage-blob", - "version": "12.25.1", + "version": "12.26.0", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "Microsoft Azure Blob Storage Client Library for Python\n# Azure Storage Blobs client library for Python\nAzure Blob storage is Microsoft's object storage solution for the cloud. Blob storage is optimized for storing massive amounts of unstructured data, such as text or binary data.\n\nBlob storage is ideal for:\n\n* Serving images or documents directly to a browser\n* Storing files for distributed access\n* Streaming video and audio\n* Storing data for backup and restore, disaster recovery, and archiving\n* Storing data for analysis by an on-premises or Azure-hosted service\n\n[Source code](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/azure/storage/blob)\n| [Package (PyPI)](https://pypi.org/project/azure-storage-blob/)\n| [Package (Conda)](https://anaconda.org/microsoft/azure-storage/)\n| [API reference documentation](https://aka.ms/azsdk-python-storage-blob-ref)\n| [Product documentation](https://learn.microsoft.com/azure/storage/)\n| [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples)\n\n\n## Getting started\n\n### Prerequisites\n* Python 3.8 or later is required to use this package. 
For more details, please read our page on [Azure SDK for Python version support policy](https://github.com/Azure/azure-sdk-for-python/wiki/Azure-SDKs-Python-version-support-policy).\n* You must have an [Azure subscription](https://azure.microsoft.com/free/) and an\n[Azure storage account](https://learn.microsoft.com/azure/storage/common/storage-account-overview) to use this package.\n\n### Install the package\nInstall the Azure Storage Blobs client library for Python with [pip](https://pypi.org/project/pip/):\n\n```bash\npip install azure-storage-blob\n```\n\n### Create a storage account\nIf you wish to create a new storage account, you can use the\n[Azure Portal](https://learn.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-portal),\n[Azure PowerShell](https://learn.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-powershell),\nor [Azure CLI](https://learn.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-cli):\n\n```bash\n# Create a new resource group to hold the storage account -\n# if using an existing resource group, skip this step\naz group create --name my-resource-group --location westus2\n\n# Create the storage account\naz storage account create -n my-storage-account-name -g my-resource-group\n```\n\n### Create the client\nThe Azure Storage Blobs client library for Python allows you to interact with three types of resources: the storage\naccount itself, blob storage containers, and blobs. Interaction with these resources starts with an instance of a\n[client](#clients). To create a client object, you will need the storage account's blob service account URL and a\ncredential that allows you to access the storage account:\n\n```python\nfrom azure.storage.blob import BlobServiceClient\n\nservice = BlobServiceClient(account_url=\"https://.blob.core.windows.net/\", credential=credential)\n```\n\n#### Looking up the account URL\nYou can find the storage account's blob service URL using the\n[Azure Portal](https://learn.microsoft.com/azure/storage/common/storage-account-overview#storage-account-endpoints),\n[Azure PowerShell](https://learn.microsoft.com/powershell/module/az.storage/get-azstorageaccount),\nor [Azure CLI](https://learn.microsoft.com/cli/azure/storage/account?view=azure-cli-latest#az-storage-account-show):\n\n```bash\n# Get the blob service account url for the storage account\naz storage account show -n my-storage-account-name -g my-resource-group --query \"primaryEndpoints.blob\"\n```\n\n#### Types of credentials\nThe `credential` parameter may be provided in a number of different forms, depending on the type of\n[authorization](https://learn.microsoft.com/azure/storage/common/storage-auth) you wish to use:\n1. 
To use an [Azure Active Directory (AAD) token credential](https://learn.microsoft.com/azure/storage/common/storage-auth-aad),\n provide an instance of the desired credential type obtained from the\n [azure-identity](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#credentials) library.\n For example, [DefaultAzureCredential](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#defaultazurecredential)\n can be used to authenticate the client.\n\n This requires some initial setup:\n * [Install azure-identity](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#install-the-package)\n * [Register a new AAD application](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) and give permissions to access Azure Storage\n * [Grant access](https://learn.microsoft.com/azure/storage/common/storage-auth-aad-rbac-portal) to Azure Blob data with RBAC in the Azure Portal\n * Set the values of the client ID, tenant ID, and client secret of the AAD application as environment variables:\n AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET\n\n Use the returned token credential to authenticate the client:\n ```python\n from azure.identity import DefaultAzureCredential\n from azure.storage.blob import BlobServiceClient\n token_credential = DefaultAzureCredential()\n\n blob_service_client = BlobServiceClient(\n account_url=\"https://.blob.core.windows.net\",\n credential=token_credential\n )\n ```\n\n2. To use a [shared access signature (SAS) token](https://learn.microsoft.com/azure/storage/common/storage-sas-overview),\n provide the token as a string. If your account URL includes the SAS token, omit the credential parameter.\n You can generate a SAS token from the Azure Portal under \"Shared access signature\" or use one of the `generate_sas()`\n functions to create a sas token for the storage account, container, or blob:\n\n ```python\n from datetime import datetime, timedelta\n from azure.storage.blob import BlobServiceClient, generate_account_sas, ResourceTypes, AccountSasPermissions\n\n sas_token = generate_account_sas(\n account_name=\"\",\n account_key=\"\",\n resource_types=ResourceTypes(service=True),\n permission=AccountSasPermissions(read=True),\n expiry=datetime.utcnow() + timedelta(hours=1)\n )\n\n blob_service_client = BlobServiceClient(account_url=\"https://.blob.core.windows.net\", credential=sas_token)\n ```\n\n3. To use a storage account [shared key](https://learn.microsoft.com/rest/api/storageservices/authenticate-with-shared-key/)\n (aka account key or access key), provide the key as a string. This can be found in the Azure Portal under the \"Access Keys\"\n section or by running the following Azure CLI command:\n\n ```az storage account keys list -g MyResourceGroup -n MyStorageAccount```\n\n Use the key as the credential parameter to authenticate the client:\n ```python\n from azure.storage.blob import BlobServiceClient\n service = BlobServiceClient(account_url=\"https://.blob.core.windows.net\", credential=\"\")\n ```\n \n If you are using **customized url** (which means the url is not in this format `.blob.core.windows.net`),\n please instantiate the client using the credential below:\n ```python\n from azure.storage.blob import BlobServiceClient\n service = BlobServiceClient(account_url=\"https://.blob.core.windows.net\", \n credential={\"account_name\": \"\", \"account_key\":\"\"})\n ```\n\n4. 
To use [anonymous public read access](https://learn.microsoft.com/azure/storage/blobs/storage-manage-access-to-resources),\n simply omit the credential parameter.\n\n#### Creating the client from a connection string\nDepending on your use case and authorization method, you may prefer to initialize a client instance with a storage\nconnection string instead of providing the account URL and credential separately. To do this, pass the storage\nconnection string to the client's `from_connection_string` class method:\n\n```python\nfrom azure.storage.blob import BlobServiceClient\n\nconnection_string = \"DefaultEndpointsProtocol=https;AccountName=xxxx;AccountKey=xxxx;EndpointSuffix=core.windows.net\"\nservice = BlobServiceClient.from_connection_string(conn_str=connection_string)\n```\n\nThe connection string to your storage account can be found in the Azure Portal under the \"Access Keys\" section or by running the following CLI command:\n\n```bash\naz storage account show-connection-string -g MyResourceGroup -n MyStorageAccount\n```\n\n## Key concepts\nThe following components make up the Azure Blob Service:\n* The storage account itself\n* A container within the storage account\n* A blob within a container\n\nThe Azure Storage Blobs client library for Python allows you to interact with each of these components through the\nuse of a dedicated client object.\n\n### Clients\nFour different clients are provided to interact with the various components of the Blob Service:\n1. [BlobServiceClient](https://aka.ms/azsdk-python-storage-blob-blobserviceclient) -\n this client represents interaction with the Azure storage account itself, and allows you to acquire preconfigured\n client instances to access the containers and blobs within. It provides operations to retrieve and configure the\n account properties as well as list, create, and delete containers within the account. To perform operations on a\n specific container or blob, retrieve a client using the `get_container_client` or `get_blob_client` methods.\n2. [ContainerClient](https://aka.ms/azsdk-python-storage-blob-containerclient) -\n this client represents interaction with a specific container (which need not exist yet), and allows you to acquire\n preconfigured client instances to access the blobs within. It provides operations to create, delete, or configure a\n container and includes operations to list, upload, and delete the blobs within it. To perform operations on a\n specific blob within the container, retrieve a client using the `get_blob_client` method.\n3. [BlobClient](https://aka.ms/azsdk-python-storage-blob-blobclient) -\n this client represents interaction with a specific blob (which need not exist yet). It provides operations to\n upload, download, delete, and create snapshots of a blob, as well as specific operations per blob type.\n4. [BlobLeaseClient](https://aka.ms/azsdk-python-storage-blob-blobleaseclient) -\n this client represents lease interactions with a `ContainerClient` or `BlobClient`. It provides operations to\n acquire, renew, release, change, and break a lease on a specified resource.\n\n### Async Clients \nThis library includes a complete async API supported on Python 3.5+. 
To use it, you must\nfirst install an async transport, such as [aiohttp](https://pypi.org/project/aiohttp/).\nSee\n[azure-core documentation](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md#transport)\nfor more information.\n\nAsync clients and credentials should be closed when they're no longer needed. These\nobjects are async context managers and define async `close` methods.\n\n### Blob Types\nOnce you've initialized a Client, you can choose from the different types of blobs:\n* [Block blobs](https://learn.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-block-blobs)\n store text and binary data, up to approximately 4.75 TiB. Block blobs are made up of blocks of data that can be\n managed individually\n* [Append blobs](https://learn.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-append-blobs)\n are made up of blocks like block blobs, but are optimized for append operations. Append blobs are ideal for scenarios\n such as logging data from virtual machines\n* [Page blobs](https://learn.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-page-blobs)\n store random access files up to 8 TiB in size. Page blobs store virtual hard drive (VHD) files and serve as disks for\n Azure virtual machines\n\n## Examples\nThe following sections provide several code snippets covering some of the most common Storage Blob tasks, including:\n\n* [Create a container](#create-a-container \"Create a container\")\n* [Uploading a blob](#uploading-a-blob \"Uploading a blob\")\n* [Downloading a blob](#downloading-a-blob \"Downloading a blob\")\n* [Enumerating blobs](#enumerating-blobs \"Enumerating blobs\")\n\nNote that a container must be created before to upload or download a blob.\n\n### Create a container\n\nCreate a container from where you can upload or download blobs.\n```python\nfrom azure.storage.blob import ContainerClient\n\ncontainer_client = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\ncontainer_client.create_container()\n```\n\nUse the async client to create a container\n\n```python\nfrom azure.storage.blob.aio import ContainerClient\n\ncontainer_client = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\nawait container_client.create_container()\n```\n\n### Uploading a blob\nUpload a blob to your container\n\n```python\nfrom azure.storage.blob import BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./SampleSource.txt\", \"rb\") as data:\n blob.upload_blob(data)\n```\n\nUse the async client to upload a blob\n\n```python\nfrom azure.storage.blob.aio import BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./SampleSource.txt\", \"rb\") as data:\n await blob.upload_blob(data)\n```\n\n### Downloading a blob\nDownload a blob from your container\n\n```python\nfrom azure.storage.blob import BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./BlockDestination.txt\", \"wb\") as my_blob:\n blob_data = blob.download_blob()\n blob_data.readinto(my_blob)\n```\n\nDownload a blob asynchronously\n\n```python\nfrom azure.storage.blob.aio import 
BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./BlockDestination.txt\", \"wb\") as my_blob:\n stream = await blob.download_blob()\n data = await stream.readall()\n my_blob.write(data)\n```\n\n### Enumerating blobs\nList the blobs in your container\n\n```python\nfrom azure.storage.blob import ContainerClient\n\ncontainer = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\nblob_list = container.list_blobs()\nfor blob in blob_list:\n print(blob.name + '\\n')\n```\n\nList the blobs asynchronously\n\n```python\nfrom azure.storage.blob.aio import ContainerClient\n\ncontainer = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\nblob_list = []\nasync for blob in container.list_blobs():\n blob_list.append(blob)\nprint(blob_list)\n```\n\n## Optional Configuration\n\nOptional keyword arguments that can be passed in at the client and per-operation level.\n\n### Retry Policy configuration\n\nUse the following keyword arguments when instantiating a client to configure the retry policy:\n\n* __retry_total__ (int): Total number of retries to allow. Takes precedence over other counts.\nPass in `retry_total=0` if you do not want to retry on requests. Defaults to 10.\n* __retry_connect__ (int): How many connection-related errors to retry on. Defaults to 3.\n* __retry_read__ (int): How many times to retry on read errors. Defaults to 3.\n* __retry_status__ (int): How many times to retry on bad status codes. Defaults to 3.\n* __retry_to_secondary__ (bool): Whether the request should be retried to secondary, if able.\nThis should only be enabled of RA-GRS accounts are used and potentially stale data can be handled.\nDefaults to `False`.\n\n### Encryption configuration\n\nUse the following keyword arguments when instantiating a client to configure encryption:\n\n* __require_encryption__ (bool): If set to True, will enforce that objects are encrypted and decrypt them.\n* __encryption_version__ (str): Specifies the version of encryption to use. Current options are `'2.0'` or `'1.0'` and\nthe default value is `'1.0'`. Version 1.0 is deprecated, and it is **highly recommended** to use version 2.0.\n* __key_encryption_key__ (object): The user-provided key-encryption-key. The instance must implement the following methods:\n - `wrap_key(key)`--wraps the specified key using an algorithm of the user's choice.\n - `get_key_wrap_algorithm()`--returns the algorithm used to wrap the specified symmetric key.\n - `get_kid()`--returns a string key id for this key-encryption-key.\n* __key_resolver_function__ (callable): The user-provided key resolver. Uses the kid string to return a key-encryption-key\nimplementing the interface defined above.\n\n### Other client / per-operation configuration\n\nOther optional configuration keyword arguments that can be specified on the client or per-operation.\n\n**Client keyword arguments:**\n\n* __connection_timeout__ (int): The number of seconds the client will wait to establish a connection to the server.\nDefaults to 20 seconds.\n* __read_timeout__ (int): The number of seconds the client will wait, between consecutive read operations, for a\nresponse from the server. This is a socket level timeout and is not affected by overall data size. Client-side read \ntimeouts will be automatically retried. 
Defaults to 60 seconds.\n* __transport__ (Any): User-provided transport to send the HTTP request.\n\n**Per-operation keyword arguments:**\n\n* __raw_response_hook__ (callable): The given callback uses the response returned from the service.\n* __raw_request_hook__ (callable): The given callback uses the request before being sent to service.\n* __client_request_id__ (str): Optional user specified identification of the request.\n* __user_agent__ (str): Appends the custom value to the user-agent header to be sent with the request.\n* __logging_enable__ (bool): Enables logging at the DEBUG level. Defaults to False. Can also be passed in at\nthe client level to enable it for all requests.\n* __logging_body__ (bool): Enables logging the request and response body. Defaults to False. Can also be passed in at\nthe client level to enable it for all requests.\n* __headers__ (dict): Pass in custom headers as key, value pairs. E.g. `headers={'CustomValue': value}`\n\n## Troubleshooting\n### General\nStorage Blob clients raise exceptions defined in [Azure Core](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/README.md).\n\nThis list can be used for reference to catch thrown exceptions. To get the specific error code of the exception, use the `error_code` attribute, i.e, `exception.error_code`.\n\n### Logging\nThis library uses the standard\n[logging](https://docs.python.org/3/library/logging.html) library for logging.\nBasic information about HTTP sessions (URLs, headers, etc.) is logged at INFO\nlevel.\n\nDetailed DEBUG level logging, including request/response bodies and unredacted\nheaders, can be enabled on a client with the `logging_enable` argument:\n```python\nimport sys\nimport logging\nfrom azure.storage.blob import BlobServiceClient\n\n# Create a logger for the 'azure.storage.blob' SDK\nlogger = logging.getLogger('azure.storage.blob')\nlogger.setLevel(logging.DEBUG)\n\n# Configure a console output\nhandler = logging.StreamHandler(stream=sys.stdout)\nlogger.addHandler(handler)\n\n# This client will log detailed information about its HTTP sessions, at DEBUG level\nservice_client = BlobServiceClient.from_connection_string(\"your_connection_string\", logging_enable=True)\n```\n\nSimilarly, `logging_enable` can enable detailed logging for a single operation,\neven when it isn't enabled for the client:\n```python\nservice_client.get_service_stats(logging_enable=True)\n```\n\n## Next steps\n\n### More sample code\n\nGet started with our [Blob samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples).\n\nSeveral Storage Blobs Python SDK samples are available to you in the SDK's GitHub repository. 
These samples provide example code for additional scenarios commonly encountered while working with Storage Blobs:\n\n* [blob_samples_container_access_policy.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy_async.py)) - Examples to set Access policies:\n * Set up Access Policy for container\n\n* [blob_samples_hello_world.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world_async.py)) - Examples for common Storage Blob tasks:\n * Set up a container\n * Create a block, page, or append blob\n * Upload blobs\n * Download blobs\n * Delete blobs\n\n* [blob_samples_authentication.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_authentication.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_authentication_async.py)) - Examples for authenticating and creating the client:\n * From a connection string\n * From a shared access key\n * From a shared access signature token\n * From active directory\n\n* [blob_samples_service.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_service.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_service_async.py)) - Examples for interacting with the blob service:\n * Get account information\n * Get and set service properties\n * Get service statistics\n * Create, list, and delete containers\n * Get the Blob or Container client\n\n* [blob_samples_containers.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_containers_async.py)) - Examples for interacting with containers:\n * Create a container and delete containers\n * Set metadata on containers\n * Get container properties\n * Acquire a lease on container\n * Set an access policy on a container\n * Upload, list, delete blobs in container\n * Get the blob client to interact with a specific blob\n\n* [blob_samples_common.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_common.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_common_async.py)) - Examples common to all types of blobs:\n * Create a snapshot\n * Delete a blob snapshot\n * Soft delete a blob\n * Undelete a blob\n * Acquire a lease on a blob\n * Copy a blob from a URL\n\n* [blob_samples_directory_interface.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface.py) - Examples for interfacing with Blob storage as if it were a directory on a filesystem:\n * Copy (upload or download) a single file or directory\n 
* List files or directories at a single level or recursively\n * Delete a single file or recursively delete a directory\n\n### Additional documentation\nFor more extensive documentation on Azure Blob storage, see the [Azure Blob storage documentation](https://learn.microsoft.com/azure/storage/blobs/) on learn.microsoft.com.\n\n## Contributing\nThis project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.microsoft.com.\n\nWhen you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA.\n\nThis project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.", - "release_date": "2025-03-27T17:13:06", + "release_date": "2025-07-16T21:34:09", "parties": [ { "type": "person", @@ -284,11 +284,11 @@ "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob", - "download_url": "https://files.pythonhosted.org/packages/57/33/085d9352d416e617993821b9d9488222fbb559bc15c3641d6cbd6d16d236/azure_storage_blob-12.25.1-py3-none-any.whl", - "size": 406990, + "download_url": "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", + "size": 412907, "sha1": null, - "md5": "20b5072c0d73c87cc0bd020da5c5f2f4", - "sha256": "1f337aab12e918ec3f1b638baada97550673911c4ceed892acc8e4e891b74167", + "md5": "b7ee3d0eec2bce8bbf60fc238d4349b7", + "sha256": "8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", "sha512": null, "bug_tracking_url": null, "code_view_url": null, @@ -308,20 +308,20 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/azure-storage-blob/12.25.1/json", + "api_data_url": "https://pypi.org/pypi/azure-storage-blob/12.26.0/json", "datasource_id": null, - "purl": "pkg:pypi/azure-storage-blob@12.25.1" + "purl": "pkg:pypi/azure-storage-blob@12.26.0" }, { "type": "pypi", "namespace": null, "name": "certifi", - "version": "2025.7.14", + "version": "2025.8.3", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "Python package for providing Mozilla's CA Bundle.\nCertifi: Python SSL Certificates\n================================\n\nCertifi provides Mozilla's carefully curated collection of Root Certificates for\nvalidating the trustworthiness of SSL certificates while verifying the identity\nof TLS hosts. It has been extracted from the `Requests`_ project.\n\nInstallation\n------------\n\n``certifi`` is available on PyPI. 
Simply install it with ``pip``::\n\n $ pip install certifi\n\nUsage\n-----\n\nTo reference the installed certificate authority (CA) bundle, you can use the\nbuilt-in function::\n\n >>> import certifi\n\n >>> certifi.where()\n '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'\n\nOr from the command line::\n\n $ python -m certifi\n /usr/local/lib/python3.7/site-packages/certifi/cacert.pem\n\nEnjoy!\n\n.. _`Requests`: https://requests.readthedocs.io/en/master/\n\nAddition/Removal of Certificates\n--------------------------------\n\nCertifi does not support any addition/removal or other modification of the\nCA trust store content. This project is intended to provide a reliable and\nhighly portable root of trust to python deployments. Look to upstream projects\nfor methods to use alternate trust.", - "release_date": "2025-07-14T03:29:26", + "release_date": "2025-08-03T03:07:45", "parties": [ { "type": "person", @@ -347,11 +347,11 @@ "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/certifi/python-certifi", - "download_url": "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", - "size": 162722, + "download_url": "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", + "size": 161216, "sha1": null, - "md5": "8561c6b29236cd268f57ddb4f22281d3", - "sha256": "6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", + "md5": "f9b6740cffcf397b47bc7fb7782b1354", + "sha256": "f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/certifi/python-certifi", @@ -371,26 +371,33 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/certifi/2025.7.14/json", + "api_data_url": "https://pypi.org/pypi/certifi/2025.8.3/json", "datasource_id": null, - "purl": "pkg:pypi/certifi@2025.7.14" + "purl": "pkg:pypi/certifi@2025.8.3" }, { "type": "pypi", "namespace": null, "name": "cffi", - "version": "1.17.1", + "version": "2.0.0", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "CFFI\n====\n\nForeign Function Interface for Python calling C code.\nPlease see the `Documentation `_.\n\nContact\n-------\n\n`Mailing list `_", - "release_date": "2024-09-04T20:44:21", + "description": "[![GitHub Actions Status](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml/badge.svg?branch=main)](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml?query=branch%3Amain++)\n[![PyPI version](https://img.shields.io/pypi/v/cffi.svg)](https://pypi.org/project/cffi)\n[![Read the Docs](https://img.shields.io/badge/docs-latest-blue.svg)][Documentation]\n\n\nCFFI\n====\n\nForeign Function Interface for Python calling C code.\n\nPlease see the [Documentation] or uncompiled in the `doc/` subdirectory.\n\nDownload\n--------\n\n[Download page](https://github.com/python-cffi/cffi/releases)\n\nSource Code\n-----------\n\nSource code is publicly available on\n[GitHub](https://github.com/python-cffi/cffi).\n\nContact\n-------\n\n[Mailing list](https://groups.google.com/forum/#!forum/python-cffi)\n\nTesting/development tips\n------------------------\n\nAfter `git clone` or `wget && tar`, we will get a directory called `cffi` or `cffi-x.x.x`. 
we call it `repo-directory`. To run tests under CPython, run the following in the `repo-directory`:\n\n pip install pytest\n pip install -e . # editable install of CFFI for local development\n pytest src/c/ testing/\n\n[Documentation]: http://cffi.readthedocs.org/", + "release_date": "2025-09-08T23:22:55", "parties": [ { "type": "person", "role": "author", "name": "Armin Rigo, Maciej Fijalkowski", - "email": "python-cffi@googlegroups.com", + "email": null, + "url": null + }, + { + "type": "person", + "role": "maintainer", + "name": "Matt Davis, Matt Clay, Matti Picus", + "email": null, "url": null } ], @@ -401,29 +408,24 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy" + "Programming Language :: Python :: Free Threading :: 2 - Beta", + "Programming Language :: Python :: Implementation :: CPython" ], - "homepage_url": "http://cffi.readthedocs.org", - "download_url": "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "size": 479424, + "homepage_url": null, + "download_url": "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", + "size": 221361, "sha1": null, - "md5": "9a3c4039917ce4c92bb5b26636a5bc9a", - "sha256": "b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", + "md5": "22dc3bf9218b2b45f2980eae627112f1", + "sha256": "2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", "sha512": null, "bug_tracking_url": "https://github.com/python-cffi/cffi/issues", "code_view_url": "https://github.com/python-cffi/cffi", "vcs_url": null, "copyright": null, - "license_expression": null, - "declared_license": { - "license": "MIT", - "classifiers": [ - "License :: OSI Approved :: MIT License" - ] - }, + "license_expression": "MIT", + "declared_license": {}, "notice_text": null, "source_packages": [], "file_references": [], @@ -431,20 +433,20 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/cffi/1.17.1/json", + "api_data_url": "https://pypi.org/pypi/cffi/2.0.0/json", "datasource_id": null, - "purl": "pkg:pypi/cffi@1.17.1" + "purl": "pkg:pypi/cffi@2.0.0" }, { "type": "pypi", "namespace": null, "name": "charset-normalizer", - "version": "3.4.2", + "version": "3.4.3", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.\n

Charset Detection, for Everyone \ud83d\udc4b\n\nThe Real First Universal Charset Detector\n\nFeatured Packages\n\nIn other language (unofficial port - by the community)\n\n> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,\n> I'm trying to resolve the issue by taking a new approach.\n> All IANA character set names for which the Python core library provides codecs are supported.\n\n>>>>> \ud83d\udc49 Try Me Online Now, Then Adopt Me \ud83d\udc48 <<<<<\n\nThis project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.\n\n| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |\n|--------------------------------------------------|:---------------------------------------------:|:------------------:|:-----------------------------------------------:|\n| `Fast` | \u274c | \u2705 | \u2705 |\n| `Universal**` | \u274c | \u2705 | \u274c |\n| `Reliable` **without** distinguishable standards | \u274c | \u2705 | \u2705 |\n| `Reliable` **with** distinguishable standards | \u2705 | \u2705 | \u2705 |\n| `License` | LGPL-2.1 _restrictive_ | MIT | MPL-1.1 _restrictive_ |\n| `Native Python` | \u2705 | \u2705 | \u274c |\n| `Detect spoken language` | \u274c | \u2705 | N/A |\n| `UnicodeDecodeError Safety` | \u274c | \u2705 | \u274c |\n| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |\n| `Supported Encoding` | 33 | \ud83c\udf89 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |\n\n*\\*\\* : They are clearly using specific code for a specific encoding even if covering most of used one*
\n\n## \u26a1 Performance\n\nThis package offer better performance than its counterpart Chardet. Here are some numbers.\n\n| Package | Accuracy | Mean per file (ms) | File per sec (est) |\n|-----------------------------------------------|:--------:|:------------------:|:------------------:|\n| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |\n| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |\n\n| Package | 99th percentile | 95th percentile | 50th percentile |\n|-----------------------------------------------|:---------------:|:---------------:|:---------------:|\n| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |\n| charset-normalizer | 100 ms | 50 ms | 5 ms |\n\n_updated as of december 2024 using CPython 3.12_\n\nChardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload.\n\n> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.\n> And yes, these results might change at any time. The dataset can be updated to include more files.\n> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.\n> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability\n> (e.g. Supported Encoding) Challenge-them if you want.\n\n## \u2728 Installation\n\nUsing pip:\n\n```sh\npip install charset-normalizer -U\n```\n\n## \ud83d\ude80 Basic Usage\n\n### CLI\nThis package comes with a CLI.\n\n```\nusage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]\n file [file ...]\n\nThe Real First Universal Charset Detector. Discover originating encoding used\non text file. Normalize text to unicode.\n\npositional arguments:\n files File(s) to be analysed\n\noptional arguments:\n -h, --help show this help message and exit\n -v, --verbose Display complementary information about file if any.\n Stdout will contain logs about the detection process.\n -a, --with-alternative\n Output complementary possibilities if any. Top-level\n JSON WILL be a list.\n -n, --normalize Permit to normalize input file. If not set, program\n does not write anything.\n -m, --minimal Only output the charset detected to STDOUT. Disabling\n JSON output.\n -r, --replace Replace file when trying to normalize it instead of\n creating a new one.\n -f, --force Replace file without asking if you are sure, use this\n flag with caution.\n -t THRESHOLD, --threshold THRESHOLD\n Define a custom maximum amount of chaos allowed in\n decoded content. 0. 
<= chaos <= 1.\n --version Show version information and exit.\n```\n\n```bash\nnormalizer ./data/sample.1.fr.srt\n```\n\nor\n\n```bash\npython -m charset_normalizer ./data/sample.1.fr.srt\n```\n\n\ud83c\udf89 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format.\n\n```json\n{\n \"path\": \"/home/default/projects/charset_normalizer/data/sample.1.fr.srt\",\n \"encoding\": \"cp1252\",\n \"encoding_aliases\": [\n \"1252\",\n \"windows_1252\"\n ],\n \"alternative_encodings\": [\n \"cp1254\",\n \"cp1256\",\n \"cp1258\",\n \"iso8859_14\",\n \"iso8859_15\",\n \"iso8859_16\",\n \"iso8859_3\",\n \"iso8859_9\",\n \"latin_1\",\n \"mbcs\"\n ],\n \"language\": \"French\",\n \"alphabets\": [\n \"Basic Latin\",\n \"Latin-1 Supplement\"\n ],\n \"has_sig_or_bom\": false,\n \"chaos\": 0.149,\n \"coherence\": 97.152,\n \"unicode_path\": null,\n \"is_preferred\": true\n}\n```\n\n### Python\n*Just print out normalized text*\n```python\nfrom charset_normalizer import from_path\n\nresults = from_path('./my_subtitle.srt')\n\nprint(str(results.best()))\n```\n\n*Upgrade your code without effort*\n```python\nfrom charset_normalizer import detect\n```\n\nThe above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.\n\nSee the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)\n\n## \ud83d\ude07 Why\n\nWhen I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a\nreliable alternative using a completely different method. Also! I never back down on a good challenge!\n\nI **don't care** about the **originating charset** encoding, because **two different tables** can\nproduce **two identical rendered string.**\nWhat I want is to get readable text, the best I can.\n\nIn a way, **I'm brute forcing text decoding.** How cool is that ? \ud83d\ude0e\n\nDon't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.\n\n## \ud83c\udf70 How\n\n - Discard all charset encoding table that could not fit the binary content.\n - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.\n - Extract matches with the lowest mess detected.\n - Additionally, we measure coherence / probe for a language.\n\n**Wait a minute**, what is noise/mess and coherence according to **YOU ?**\n\n*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then\n**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).\n I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to\n improve or rewrite it.\n\n*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought\nthat intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.\n\n## \u26a1 Known limitations\n\n - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))\n - Every charset detector heavily depends on sufficient content. 
In common cases, do not bother run detection on very tiny content.\n\n## \u26a0\ufe0f About Python EOLs\n\n**If you are running:**\n\n- Python >=2.7,<3.5: Unsupported\n- Python 3.5: charset-normalizer < 2.1\n- Python 3.6: charset-normalizer < 3.1\n- Python 3.7: charset-normalizer < 4.0\n\nUpgrade your Python interpreter as soon as possible.\n\n## \ud83d\udc64 Contributing\n\nContributions, issues and feature requests are very much welcome.
\nFeel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.\n\n## \ud83d\udcdd License\n\nCopyright \u00a9 [Ahmed TAHRI @Ousret](https://github.com/Ousret).
\nThis project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.\n\nCharacters frequencies used in this project \u00a9 2012 [Denny Vrande\u010di\u0107](http://simia.net/letters/)\n\n## \ud83d\udcbc For Enterprise\n\nProfessional support for charset-normalizer is available as part of the [Tidelift\nSubscription][1]. Tidelift gives software development teams a single source for\npurchasing and maintaining their software, with professional grade assurances\nfrom the experts who know it best, while seamlessly integrating with existing\ntools.\n\n[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme\n\n[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297)\n\n# Changelog\nAll notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\n\n## [3.4.2](https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2) (2025-05-02)\n\n### Fixed\n- Addressed the DeprecationWarning in our CLI regarding `argparse.FileType` by backporting the target class into the package. (#591)\n- Improved the overall reliability of the detector with CJK Ideographs. (#605) (#587)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8\n\n## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24)\n\n### Changed\n- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend.\n- Enforce annotation delayed loading for a simpler and consistent types in the project.\n- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8\n\n### Added\n- pre-commit configuration.\n- noxfile.\n\n### Removed\n- `build-requirements.txt` as per using `pyproject.toml` native build configuration.\n- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile).\n- `setup.cfg` in favor of `pyproject.toml` metadata configuration.\n- Unused `utils.range_scan` function.\n\n### Fixed\n- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. (#572)\n- Deprecation warning \"'count' is passed as positional argument\" when converting to Unicode bytes on Python 3.13+\n\n## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)\n\n### Added\n- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints.\n- Support for Python 3.13 (#512)\n\n### Fixed\n- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.\n- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537)\n- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. 
(#381)\n\n## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)\n\n### Fixed\n- Unintentional memory usage regression when using large payload that match several encoding (#376)\n- Regression on some detection case showcased in the documentation (#371)\n\n### Added\n- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)\n\n## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8\n- Improved the general detection reliability based on reports from the community\n\n## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)\n\n### Added\n- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`\n- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)\n\n### Removed\n- (internal) Redundant utils.is_ascii function and unused function is_private_use_only\n- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant\n\n### Changed\n- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection\n- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8\n\n### Fixed\n- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \\_\\_lt\\_\\_ (#350)\n\n## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)\n\n### Changed\n- Typehint for function `from_path` no longer enforce `PathLike` as its first argument\n- Minor improvement over the global detection reliability\n\n### Added\n- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries\n- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)\n- Explicit support for Python 3.12\n\n### Fixed\n- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)\n\n## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)\n\n### Added\n- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)\n\n### Removed\n- Support for Python 3.6 (PR #260)\n\n### Changed\n- Optional speedup provided by mypy/c 1.0.1\n\n## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)\n\n### Fixed\n- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233)\n\n### Changed\n- Speedup provided by mypy/c 0.990 on Python >= 3.7\n\n## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Changed\n- Build 
with static metadata using 'build' frontend\n- Make the language detection stricter\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n- Sphinx warnings when generating the documentation\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n\n### Changed\n- Build with static metadata using 'build' frontend\n- Make the language detection stricter\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n\n## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)\n\n### Added\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Removed\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n\n### Fixed\n- Sphinx warnings when generating the documentation\n\n## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)\n\n### Changed\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Removed\n- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)\n\n### Deprecated\n- Function `normalize` scheduled for removal in 3.0\n\n### Changed\n- Removed useless call to decode in fn is_unprintable (#206)\n\n### Fixed\n- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) 
(#204)\n\n## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)\n\n### Added\n- Output the Unicode table version when running the CLI with `--version` (PR #194)\n\n### Changed\n- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)\n- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)\n\n### Fixed\n- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)\n- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)\n\n### Removed\n- Support for Python 3.5 (PR #192)\n\n### Deprecated\n- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)\n\n## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)\n\n### Fixed\n- ASCII miss-detection on rare cases (PR #170)\n\n## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)\n\n### Added\n- Explicit support for Python 3.11 (PR #164)\n\n### Changed\n- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)\n\n## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)\n\n### Fixed\n- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)\n\n### Changed\n- Skipping the language-detection (CD) on ASCII (PR #155)\n\n## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)\n\n### Changed\n- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)\n\n### Fixed\n- Wrong logging level applied when setting kwarg `explain` to True (PR #146)\n\n## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)\n### Changed\n- Improvement over Vietnamese detection (PR #126)\n- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)\n- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)\n- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)\n- Code style as refactored by Sourcery-AI (PR #131)\n- Minor adjustment on the MD around european words (PR #133)\n- Remove and replace SRTs from assets / tests (PR #139)\n- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135)\n\n### Fixed\n- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)\n- Avoid using too insignificant chunk (PR #137)\n\n### Added\n- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)\n\n## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)\n### Added\n- Add support for Kazakh (Cyrillic) language detection (PR #109)\n\n### Changed\n- Further, improve inferring the 
language from a given single-byte code page (PR #112)\n- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)\n- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)\n- Various detection improvement (MD+CD) (PR #117)\n\n### Removed\n- Remove redundant logging entry about detected language(s) (PR #115)\n\n### Fixed\n- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)\n\n## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)\n### Fixed\n- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)\n- Fix CLI crash when using --minimal output in certain cases (PR #103)\n\n### Changed\n- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)\n\n## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)\n### Changed\n- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)\n- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)\n- The Unicode detection is slightly improved (PR #93)\n- Add syntax sugar \\_\\_bool\\_\\_ for results CharsetMatches list-container (PR #91)\n\n### Removed\n- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)\n\n### Fixed\n- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)\n- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)\n- The MANIFEST.in was not exhaustive (PR #78)\n\n## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)\n### Fixed\n- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)\n- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)\n- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)\n- Submatch factoring could be wrong in rare edge cases (PR #72)\n- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72)\n- Fix line endings from CRLF to LF for certain project files (PR #67)\n\n### Changed\n- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)\n- Allow fallback on specified encoding if any (PR #71)\n\n## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)\n### Changed\n- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)\n- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)\n\n## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)\n### Fixed\n- Empty/Too small JSON payload miss-detection fixed. 
Report from [@tseaver](https://github.com/tseaver) (PR #59)\n\n### Changed\n- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)\n\n## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)\n### Fixed\n- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)\n- Using explain=False permanently disable the verbose output in the current runtime (PR #47)\n- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47)\n- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)\n\n### Changed\n- Public function normalize default args values were not aligned with from_bytes (PR #53)\n\n### Added\n- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)\n\n## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)\n### Changed\n- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.\n- Accent has been made on UTF-8 detection, should perform rather instantaneous.\n- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.\n- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)\n- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+\n- utf_7 detection has been reinstated.\n\n### Removed\n- This package no longer require anything when used with Python 3.5 (Dropped cached_property)\n- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volap\u00fck, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.\n- The exception hook on UnicodeDecodeError has been removed.\n\n### Deprecated\n- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0\n\n### Fixed\n- The CLI output used the relative path of the file(s). Should be absolute.\n\n## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)\n### Fixed\n- Logger configuration/usage no longer conflict with others (PR #44)\n\n## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)\n### Removed\n- Using standard logging instead of using the package loguru.\n- Dropping nose test framework in favor of the maintained pytest.\n- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.\n- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.\n- Stop support for UTF-7 that does not contain a SIG.\n- Dropping PrettyTable, replaced with pure JSON output in CLI.\n\n### Fixed\n- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process.\n- Not searching properly for the BOM when trying utf32/16 parent codec.\n\n### Changed\n- Improving the package final size by compressing frequencies.json.\n- Huge improvement over the larges payload.\n\n### Added\n- CLI now produces JSON consumable output.\n- Return ASCII if given sequences fit. 
Given reasonable confidence.\n\n## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)\n\n### Fixed\n- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)\n\n## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)\n\n### Fixed\n- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)\n\n## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)\n\n### Fixed\n- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)\n\n## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)\n\n### Changed\n- Amend the previous release to allow prettytable 2.0 (PR #35)\n\n## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)\n\n### Fixed\n- Fix error while using the package with a python pre-release interpreter (PR #33)\n\n### Changed\n- Dependencies refactoring, constraints revised.\n\n### Added\n- Add python 3.9 and 3.10 to the supported interpreters\n\nMIT License\n\nCopyright (c) 2025 TAHRI Ahmed R.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.", - "release_date": "2025-05-02T08:32:40", + "description": "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.\n

Charset Detection, for Everyone \ud83d\udc4b

The Real First Universal Charset Detector
[download and project badges]

Featured Packages
[badges]

In other language (unofficial port - by the community)
[badge]
\n\n> A library that helps you read text from an unknown charset encoding.
Motivated by `chardet`,\n> I'm trying to resolve the issue by taking a new approach.\n> All IANA character set names for which the Python core library provides codecs are supported.\n\n

\n >>>>> \ud83d\udc49 Try Me Online Now, Then Adopt Me \ud83d\udc48 <<<<<\n
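The quoted description above introduces the package as a chardet-motivated detector. As a minimal, hedged sketch (not part of the recorded metadata; the sample payload is invented), the legacy `detect` helper documented further below in this README can be exercised like so:

```python
# Hedged sketch of the chardet-compatible entry point; the sample bytes are
# invented for illustration, and the reported codec/confidence vary with input.
from charset_normalizer import detect

raw = "Comment ça va ? Très bien, merci.".encode("cp1252")

result = detect(raw)
# Shape mirrors chardet.detect(): a dict with 'encoding', 'language', 'confidence'.
print(result["encoding"], result["language"], result["confidence"])
```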

\n\nThis project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.\n\n| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |\n|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|\n| `Fast` | \u274c | \u2705 | \u2705 |\n| `Universal**` | \u274c | \u2705 | \u274c |\n| `Reliable` **without** distinguishable standards | \u274c | \u2705 | \u2705 |\n| `Reliable` **with** distinguishable standards | \u2705 | \u2705 | \u2705 |\n| `License` | LGPL-2.1
_restrictive_ | MIT | MPL-1.1
_restrictive_ |\n| `Native Python` | \u2705 | \u2705 | \u274c |\n| `Detect spoken language` | \u274c | \u2705 | N/A |\n| `UnicodeDecodeError Safety` | \u274c | \u2705 | \u274c |\n| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |\n| `Supported Encoding` | 33 | \ud83c\udf89 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |\n\n

\n\"Reading\"Cat\n

\n\n*\\*\\* : They are clearly using specific code for a specific encoding even if covering most of used one*
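To make rows such as `Detect spoken language` and `UnicodeDecodeError Safety` concrete, here is a minimal sketch of the `from_bytes` API referenced in this README (the payload is a made-up example; on input this short the language guess is only best-effort):

```python
# Hedged sketch of the from_bytes()/best() flow; the sample payload is invented.
from charset_normalizer import from_bytes

payload = "Der schnelle braune Fuchs springt über den faulen Hund.".encode("cp1252")

best_guess = from_bytes(payload).best()  # best CharsetMatch, or None if undetermined
if best_guess is None:
    print("No plausible encoding found")
else:
    print(best_guess.encoding)  # a codec able to decode the payload, e.g. a cp1252 sibling
    print(best_guess.language)  # best-effort spoken-language guess
    print(str(best_guess))      # decoded text; this decode does not raise UnicodeDecodeError
```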
\n\n## \u26a1 Performance\n\nThis package offer better performance than its counterpart Chardet. Here are some numbers.\n\n| Package | Accuracy | Mean per file (ms) | File per sec (est) |\n|-----------------------------------------------|:--------:|:------------------:|:------------------:|\n| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |\n| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |\n\n| Package | 99th percentile | 95th percentile | 50th percentile |\n|-----------------------------------------------|:---------------:|:---------------:|:---------------:|\n| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |\n| charset-normalizer | 100 ms | 50 ms | 5 ms |\n\n_updated as of december 2024 using CPython 3.12_\n\nChardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload.\n\n> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.\n> And yes, these results might change at any time. The dataset can be updated to include more files.\n> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.\n> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability\n> (e.g. Supported Encoding) Challenge-them if you want.\n\n## \u2728 Installation\n\nUsing pip:\n\n```sh\npip install charset-normalizer -U\n```\n\n## \ud83d\ude80 Basic Usage\n\n### CLI\nThis package comes with a CLI.\n\n```\nusage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]\n file [file ...]\n\nThe Real First Universal Charset Detector. Discover originating encoding used\non text file. Normalize text to unicode.\n\npositional arguments:\n files File(s) to be analysed\n\noptional arguments:\n -h, --help show this help message and exit\n -v, --verbose Display complementary information about file if any.\n Stdout will contain logs about the detection process.\n -a, --with-alternative\n Output complementary possibilities if any. Top-level\n JSON WILL be a list.\n -n, --normalize Permit to normalize input file. If not set, program\n does not write anything.\n -m, --minimal Only output the charset detected to STDOUT. Disabling\n JSON output.\n -r, --replace Replace file when trying to normalize it instead of\n creating a new one.\n -f, --force Replace file without asking if you are sure, use this\n flag with caution.\n -t THRESHOLD, --threshold THRESHOLD\n Define a custom maximum amount of chaos allowed in\n decoded content. 0. 
<= chaos <= 1.\n --version Show version information and exit.\n```\n\n```bash\nnormalizer ./data/sample.1.fr.srt\n```\n\nor\n\n```bash\npython -m charset_normalizer ./data/sample.1.fr.srt\n```\n\n\ud83c\udf89 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format.\n\n```json\n{\n \"path\": \"/home/default/projects/charset_normalizer/data/sample.1.fr.srt\",\n \"encoding\": \"cp1252\",\n \"encoding_aliases\": [\n \"1252\",\n \"windows_1252\"\n ],\n \"alternative_encodings\": [\n \"cp1254\",\n \"cp1256\",\n \"cp1258\",\n \"iso8859_14\",\n \"iso8859_15\",\n \"iso8859_16\",\n \"iso8859_3\",\n \"iso8859_9\",\n \"latin_1\",\n \"mbcs\"\n ],\n \"language\": \"French\",\n \"alphabets\": [\n \"Basic Latin\",\n \"Latin-1 Supplement\"\n ],\n \"has_sig_or_bom\": false,\n \"chaos\": 0.149,\n \"coherence\": 97.152,\n \"unicode_path\": null,\n \"is_preferred\": true\n}\n```\n\n### Python\n*Just print out normalized text*\n```python\nfrom charset_normalizer import from_path\n\nresults = from_path('./my_subtitle.srt')\n\nprint(str(results.best()))\n```\n\n*Upgrade your code without effort*\n```python\nfrom charset_normalizer import detect\n```\n\nThe above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.\n\nSee the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)\n\n## \ud83d\ude07 Why\n\nWhen I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a\nreliable alternative using a completely different method. Also! I never back down on a good challenge!\n\nI **don't care** about the **originating charset** encoding, because **two different tables** can\nproduce **two identical rendered string.**\nWhat I want is to get readable text, the best I can.\n\nIn a way, **I'm brute forcing text decoding.** How cool is that ? \ud83d\ude0e\n\nDon't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.\n\n## \ud83c\udf70 How\n\n - Discard all charset encoding table that could not fit the binary content.\n - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.\n - Extract matches with the lowest mess detected.\n - Additionally, we measure coherence / probe for a language.\n\n**Wait a minute**, what is noise/mess and coherence according to **YOU ?**\n\n*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then\n**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).\n I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to\n improve or rewrite it.\n\n*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought\nthat intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.\n\n## \u26a1 Known limitations\n\n - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))\n - Every charset detector heavily depends on sufficient content. 
In common cases, do not bother run detection on very tiny content.\n\n## \u26a0\ufe0f About Python EOLs\n\n**If you are running:**\n\n- Python >=2.7,<3.5: Unsupported\n- Python 3.5: charset-normalizer < 2.1\n- Python 3.6: charset-normalizer < 3.1\n- Python 3.7: charset-normalizer < 4.0\n\nUpgrade your Python interpreter as soon as possible.\n\n## \ud83d\udc64 Contributing\n\nContributions, issues and feature requests are very much welcome.
\nFeel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.\n\n## \ud83d\udcdd License\n\nCopyright \u00a9 [Ahmed TAHRI @Ousret](https://github.com/Ousret).
\nThis project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.\n\nCharacters frequencies used in this project \u00a9 2012 [Denny Vrande\u010di\u0107](http://simia.net/letters/)\n\n## \ud83d\udcbc For Enterprise\n\nProfessional support for charset-normalizer is available as part of the [Tidelift\nSubscription][1]. Tidelift gives software development teams a single source for\npurchasing and maintaining their software, with professional grade assurances\nfrom the experts who know it best, while seamlessly integrating with existing\ntools.\n\n[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme\n\n[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297)\n\n# Changelog\nAll notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\n\n## [3.4.3](https://github.com/Ousret/charset_normalizer/compare/3.4.2...3.4.3) (2025-08-09)\n\n### Changed\n- mypy(c) is no longer a required dependency at build time if `CHARSET_NORMALIZER_USE_MYPYC` isn't set to `1`. (#595) (#583)\n- automatically lower confidence on small bytes samples that are not Unicode in `detect` output legacy function. (#391)\n\n### Added\n- Custom build backend to overcome inability to mark mypy as an optional dependency in the build phase.\n- Support for Python 3.14\n\n### Fixed\n- sdist archive contained useless directories.\n- automatically fallback on valid UTF-16 or UTF-32 even if the md says it's noisy. (#633)\n\n### Misc\n- SBOM are automatically published to the relevant GitHub release to comply with regulatory changes.\n Each published wheel comes with its SBOM. We choose CycloneDX as the format.\n- Prebuilt optimized wheel are no longer distributed by default for CPython 3.7 due to a change in cibuildwheel.\n\n## [3.4.2](https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2) (2025-05-02)\n\n### Fixed\n- Addressed the DeprecationWarning in our CLI regarding `argparse.FileType` by backporting the target class into the package. (#591)\n- Improved the overall reliability of the detector with CJK Ideographs. (#605) (#587)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8\n\n## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24)\n\n### Changed\n- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend.\n- Enforce annotation delayed loading for a simpler and consistent types in the project.\n- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8\n\n### Added\n- pre-commit configuration.\n- noxfile.\n\n### Removed\n- `build-requirements.txt` as per using `pyproject.toml` native build configuration.\n- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile).\n- `setup.cfg` in favor of `pyproject.toml` metadata configuration.\n- Unused `utils.range_scan` function.\n\n### Fixed\n- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. 
(#572)\n- Deprecation warning \"'count' is passed as positional argument\" when converting to Unicode bytes on Python 3.13+\n\n## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)\n\n### Added\n- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints.\n- Support for Python 3.13 (#512)\n\n### Fixed\n- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.\n- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537)\n- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. (#381)\n\n## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)\n\n### Fixed\n- Unintentional memory usage regression when using large payload that match several encoding (#376)\n- Regression on some detection case showcased in the documentation (#371)\n\n### Added\n- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)\n\n## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8\n- Improved the general detection reliability based on reports from the community\n\n## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)\n\n### Added\n- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`\n- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)\n\n### Removed\n- (internal) Redundant utils.is_ascii function and unused function is_private_use_only\n- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant\n\n### Changed\n- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection\n- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8\n\n### Fixed\n- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \\_\\_lt\\_\\_ (#350)\n\n## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)\n\n### Changed\n- Typehint for function `from_path` no longer enforce `PathLike` as its first argument\n- Minor improvement over the global detection reliability\n\n### Added\n- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries\n- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)\n- Explicit support for Python 3.12\n\n### Fixed\n- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)\n\n## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)\n\n### Added\n- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)\n\n### Removed\n- Support for Python 3.6 (PR #260)\n\n### Changed\n- Optional speedup provided by mypy/c 1.0.1\n\n## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)\n\n### Fixed\n- Multi-bytes cutter/chunk generator did not always cut 
correctly (PR #233)\n\n### Changed\n- Speedup provided by mypy/c 0.990 on Python >= 3.7\n\n## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Changed\n- Build with static metadata using 'build' frontend\n- Make the language detection stricter\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n- Sphinx warnings when generating the documentation\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n\n### Changed\n- Build with static metadata using 'build' frontend\n- Make the language detection stricter\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n\n## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)\n\n### Added\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Removed\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n\n### Fixed\n- Sphinx warnings when generating the documentation\n\n## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)\n\n### Changed\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Removed\n- Breaking: Class aliases 
CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)\n\n### Deprecated\n- Function `normalize` scheduled for removal in 3.0\n\n### Changed\n- Removed useless call to decode in fn is_unprintable (#206)\n\n### Fixed\n- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204)\n\n## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)\n\n### Added\n- Output the Unicode table version when running the CLI with `--version` (PR #194)\n\n### Changed\n- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)\n- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)\n\n### Fixed\n- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)\n- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)\n\n### Removed\n- Support for Python 3.5 (PR #192)\n\n### Deprecated\n- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)\n\n## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)\n\n### Fixed\n- ASCII miss-detection on rare cases (PR #170)\n\n## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)\n\n### Added\n- Explicit support for Python 3.11 (PR #164)\n\n### Changed\n- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)\n\n## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)\n\n### Fixed\n- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)\n\n### Changed\n- Skipping the language-detection (CD) on ASCII (PR #155)\n\n## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)\n\n### Changed\n- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)\n\n### Fixed\n- Wrong logging level applied when setting kwarg `explain` to True (PR #146)\n\n## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)\n### Changed\n- Improvement over Vietnamese detection (PR #126)\n- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)\n- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)\n- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)\n- Code style as refactored by Sourcery-AI (PR #131)\n- Minor adjustment on the MD around european words (PR #133)\n- Remove and replace SRTs from assets / tests (PR #139)\n- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Setting kwarg `explain` to True will add provisionally 
(bounded to function lifespan) a specific stream handler (PR #135)\n\n### Fixed\n- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)\n- Avoid using too insignificant chunk (PR #137)\n\n### Added\n- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)\n\n## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)\n### Added\n- Add support for Kazakh (Cyrillic) language detection (PR #109)\n\n### Changed\n- Further, improve inferring the language from a given single-byte code page (PR #112)\n- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)\n- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)\n- Various detection improvement (MD+CD) (PR #117)\n\n### Removed\n- Remove redundant logging entry about detected language(s) (PR #115)\n\n### Fixed\n- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)\n\n## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)\n### Fixed\n- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)\n- Fix CLI crash when using --minimal output in certain cases (PR #103)\n\n### Changed\n- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)\n\n## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)\n### Changed\n- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)\n- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)\n- The Unicode detection is slightly improved (PR #93)\n- Add syntax sugar \\_\\_bool\\_\\_ for results CharsetMatches list-container (PR #91)\n\n### Removed\n- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)\n\n### Fixed\n- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)\n- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)\n- The MANIFEST.in was not exhaustive (PR #78)\n\n## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)\n### Fixed\n- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)\n- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)\n- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)\n- Submatch factoring could be wrong in rare edge cases (PR #72)\n- Multiple files given to the CLI were ignored when publishing results to STDOUT. 
(After the first path) (PR #72)\n- Fix line endings from CRLF to LF for certain project files (PR #67)\n\n### Changed\n- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)\n- Allow fallback on specified encoding if any (PR #71)\n\n## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)\n### Changed\n- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)\n- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)\n\n## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)\n### Fixed\n- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)\n\n### Changed\n- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)\n\n## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)\n### Fixed\n- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)\n- Using explain=False permanently disable the verbose output in the current runtime (PR #47)\n- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47)\n- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)\n\n### Changed\n- Public function normalize default args values were not aligned with from_bytes (PR #53)\n\n### Added\n- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)\n\n## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)\n### Changed\n- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.\n- Accent has been made on UTF-8 detection, should perform rather instantaneous.\n- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.\n- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)\n- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+\n- utf_7 detection has been reinstated.\n\n### Removed\n- This package no longer require anything when used with Python 3.5 (Dropped cached_property)\n- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volap\u00fck, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.\n- The exception hook on UnicodeDecodeError has been removed.\n\n### Deprecated\n- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0\n\n### Fixed\n- The CLI output used the relative path of the file(s). 
Should be absolute.\n\n## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)\n### Fixed\n- Logger configuration/usage no longer conflict with others (PR #44)\n\n## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)\n### Removed\n- Using standard logging instead of using the package loguru.\n- Dropping nose test framework in favor of the maintained pytest.\n- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.\n- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.\n- Stop support for UTF-7 that does not contain a SIG.\n- Dropping PrettyTable, replaced with pure JSON output in CLI.\n\n### Fixed\n- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process.\n- Not searching properly for the BOM when trying utf32/16 parent codec.\n\n### Changed\n- Improving the package final size by compressing frequencies.json.\n- Huge improvement over the larges payload.\n\n### Added\n- CLI now produces JSON consumable output.\n- Return ASCII if given sequences fit. Given reasonable confidence.\n\n## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)\n\n### Fixed\n- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)\n\n## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)\n\n### Fixed\n- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)\n\n## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)\n\n### Fixed\n- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)\n\n## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)\n\n### Changed\n- Amend the previous release to allow prettytable 2.0 (PR #35)\n\n## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)\n\n### Fixed\n- Fix error while using the package with a python pre-release interpreter (PR #33)\n\n### Changed\n- Dependencies refactoring, constraints revised.\n\n### Added\n- Add python 3.9 and 3.10 to the supported interpreters\n\nMIT License\n\nCopyright (c) 2025 TAHRI Ahmed R.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.", + "release_date": "2025-08-09T07:56:20", "parties": [ { "type": "person", @@ -480,6 +482,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -490,11 +493,11 @@ "Typing :: Typed" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "size": 148567, + "download_url": "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", + "size": 153173, "sha1": null, - "md5": "fec698adfc210708df05f94ca9e09cc3", - "sha256": "4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", + "md5": "d3bd33b92fa188b18b7b8cf04435e744", + "sha256": "320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/jawah/charset_normalizer", @@ -502,10 +505,7 @@ "copyright": null, "license_expression": null, "declared_license": { - "license": "MIT", - "classifiers": [ - "License :: OSI Approved :: MIT License" - ] + "license": "MIT" }, "notice_text": null, "source_packages": [], @@ -514,20 +514,20 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/charset-normalizer/3.4.2/json", + "api_data_url": "https://pypi.org/pypi/charset-normalizer/3.4.3/json", "datasource_id": null, - "purl": "pkg:pypi/charset-normalizer@3.4.2" + "purl": "pkg:pypi/charset-normalizer@3.4.3" }, { "type": "pypi", "namespace": null, "name": "click", - "version": "8.2.1", + "version": "8.2.2", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Composable command line interface toolkit\n# $ click_\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". 
It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", - "release_date": "2025-05-20T23:19:47", + "description": "Composable command line interface toolkit\n
\"\"
\n\n# Click\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", + "release_date": "2025-08-02T02:23:39", "parties": [ { "type": "person", @@ -545,11 +545,11 @@ "Typing :: Typed" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", - "size": 102215, + "download_url": "https://files.pythonhosted.org/packages/ec/85/e7297e34133ae1cfde3bffd30c24e1ef055248251baa877834e048687a28/click-8.2.2-py3-none-any.whl", + "size": 103900, "sha1": null, - "md5": "aeead16d8bed93caa7107ac87b1e5ec8", - "sha256": "61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", + "md5": "7d180e1baded1a50d5ad31b43a965888", + "sha256": "52e1e9f5d3db8c85aa76968c7c67ed41ddbacb167f43201511c8fd61eb5ba2ca", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/pallets/click/", @@ -564,25 +564,25 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/click/8.2.1/json", + "api_data_url": "https://pypi.org/pypi/click/8.2.2/json", "datasource_id": null, - "purl": "pkg:pypi/click@8.2.1" + "purl": "pkg:pypi/click@8.2.2" }, { "type": "pypi", "namespace": null, "name": "cryptography", - "version": "45.0.5", + "version": "46.0.1", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "cryptography is a package which provides cryptographic recipes and primitives to Python developers.\npyca/cryptography\n=================\n\n.. image:: https://img.shields.io/pypi/v/cryptography.svg\n :target: https://pypi.org/project/cryptography/\n :alt: Latest Version\n\n.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest\n :target: https://cryptography.io\n :alt: Latest Docs\n\n.. 
image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main\n :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain\n\n\n``cryptography`` is a package which provides cryptographic recipes and\nprimitives to Python developers. Our goal is for it to be your \"cryptographic\nstandard library\". It supports Python 3.7+ and PyPy3 7.3.11+.\n\n``cryptography`` includes both high level recipes and low level interfaces to\ncommon cryptographic algorithms such as symmetric ciphers, message digests, and\nkey derivation functions. For example, to encrypt something with\n``cryptography``'s high level symmetric encryption recipe:\n\n.. code-block:: pycon\n\n >>> from cryptography.fernet import Fernet\n >>> # Put this somewhere safe!\n >>> key = Fernet.generate_key()\n >>> f = Fernet(key)\n >>> token = f.encrypt(b\"A really secret message. Not for prying eyes.\")\n >>> token\n b'...'\n >>> f.decrypt(token)\n b'A really secret message. Not for prying eyes.'\n\nYou can find more information in the `documentation`_.\n\nYou can install ``cryptography`` with:\n\n.. code-block:: console\n\n $ pip install cryptography\n\nFor full details see `the installation documentation`_.\n\nDiscussion\n~~~~~~~~~~\n\nIf you run into bugs, you can file them in our `issue tracker`_.\n\nWe maintain a `cryptography-dev`_ mailing list for development discussion.\n\nYou can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get\ninvolved.\n\nSecurity\n~~~~~~~~\n\nNeed to report a security issue? Please consult our `security reporting`_\ndocumentation.\n\n\n.. _`documentation`: https://cryptography.io/\n.. _`the installation documentation`: https://cryptography.io/en/latest/installation/\n.. _`issue tracker`: https://github.com/pyca/cryptography/issues\n.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev\n.. _`security reporting`: https://cryptography.io/en/latest/security/", - "release_date": "2025-07-02T13:05:46", + "description": "cryptography is a package which provides cryptographic recipes and primitives to Python developers.\npyca/cryptography\n=================\n\n.. image:: https://img.shields.io/pypi/v/cryptography.svg\n :target: https://pypi.org/project/cryptography/\n :alt: Latest Version\n\n.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest\n :target: https://cryptography.io\n :alt: Latest Docs\n\n.. image:: https://github.com/pyca/cryptography/actions/workflows/ci.yml/badge.svg\n :target: https://github.com/pyca/cryptography/actions/workflows/ci.yml?query=branch%3Amain\n\n``cryptography`` is a package which provides cryptographic recipes and\nprimitives to Python developers. Our goal is for it to be your \"cryptographic\nstandard library\". It supports Python 3.8+ and PyPy3 7.3.11+.\n\n``cryptography`` includes both high level recipes and low level interfaces to\ncommon cryptographic algorithms such as symmetric ciphers, message digests, and\nkey derivation functions. For example, to encrypt something with\n``cryptography``'s high level symmetric encryption recipe:\n\n.. code-block:: pycon\n\n >>> from cryptography.fernet import Fernet\n >>> # Put this somewhere safe!\n >>> key = Fernet.generate_key()\n >>> f = Fernet(key)\n >>> token = f.encrypt(b\"A really secret message. Not for prying eyes.\")\n >>> token\n b'...'\n >>> f.decrypt(token)\n b'A really secret message. 
Not for prying eyes.'\n\nYou can find more information in the `documentation`_.\n\nYou can install ``cryptography`` with:\n\n.. code-block:: console\n\n $ pip install cryptography\n\nFor full details see `the installation documentation`_.\n\nDiscussion\n~~~~~~~~~~\n\nIf you run into bugs, you can file them in our `issue tracker`_.\n\nWe maintain a `cryptography-dev`_ mailing list for development discussion.\n\nYou can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get\ninvolved.\n\nSecurity\n~~~~~~~~\n\nNeed to report a security issue? Please consult our `security reporting`_\ndocumentation.\n\n\n.. _`documentation`: https://cryptography.io/\n.. _`the installation documentation`: https://cryptography.io/en/latest/installation/\n.. _`issue tracker`: https://github.com/pyca/cryptography/issues\n.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev\n.. _`security reporting`: https://cryptography.io/en/latest/security/", + "release_date": "2025-09-17T00:08:49", "parties": [ { "type": "person", "role": "author", - "name": "The cryptography developers ", + "name": null, "email": "The Python Cryptographic Authority and individual contributors ", "url": null } @@ -603,28 +603,27 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: Free Threading :: 3 - Stable", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Security :: Cryptography" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/f2/2e/e2dbd629481b499b14516eed933f3276eb3239f7cee2dcfa4ee6b44d4711/cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", - "size": 4554189, + "download_url": "https://files.pythonhosted.org/packages/e5/d3/de61ad5b52433b389afca0bc70f02a7a1f074651221f599ce368da0fe437/cryptography-46.0.1-cp311-abi3-manylinux_2_28_x86_64.whl", + "size": 4604234, "sha1": null, - "md5": "e60dd7bf09e038a4508efcef2fc28cd5", - "sha256": "7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a", + "md5": "b3c22ab264b0f5a2ffc43bd9978e19e5", + "sha256": "f7a24ea78de345cfa7f6a8d3bde8b242c7fac27f2bd78fa23474ca38dfaeeab9", "sha512": null, "bug_tracking_url": null, "code_view_url": null, "vcs_url": null, "copyright": null, - "license_expression": null, - "declared_license": { - "license": "Apache-2.0 OR BSD-3-Clause" - }, + "license_expression": "Apache-2.0 OR BSD-3-Clause", + "declared_license": {}, "notice_text": null, "source_packages": [], "file_references": [], @@ -632,9 +631,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/cryptography/45.0.5/json", + "api_data_url": "https://pypi.org/pypi/cryptography/46.0.1/json", "datasource_id": null, - "purl": "pkg:pypi/cryptography@45.0.5" + "purl": "pkg:pypi/cryptography@46.0.1" }, { "type": "pypi", @@ -908,12 +907,12 @@ "type": "pypi", "namespace": null, "name": "pycparser", - "version": "2.22", + "version": "2.23", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "C parser in Python\npycparser is a complete parser of the C language, written in\npure Python using the PLY parsing library.\nIt parses C code into an AST and can serve as a 
front-end for\nC compilers or analysis tools.", - "release_date": "2024-03-30T13:22:20", + "release_date": "2025-09-09T13:23:46", "parties": [ { "type": "person", @@ -936,15 +935,16 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/eliben/pycparser", - "download_url": "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", - "size": 117552, + "download_url": "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", + "size": 118140, "sha1": null, - "md5": "e9bf4a92f270e6482393bd716406ff85", - "sha256": "c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", + "md5": "961daf0e0910747590f8a0101322bcd3", + "sha256": "e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", "sha512": null, "bug_tracking_url": null, "code_view_url": null, @@ -964,9 +964,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/pycparser/2.22/json", + "api_data_url": "https://pypi.org/pypi/pycparser/2.23/json", "datasource_id": null, - "purl": "pkg:pypi/pycparser@2.22" + "purl": "pkg:pypi/pycparser@2.23" }, { "type": "pypi", @@ -1038,12 +1038,12 @@ "type": "pypi", "namespace": null, "name": "requests", - "version": "2.32.4", + "version": "2.32.5", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Python HTTP for Humans.\n# Requests\n\n**Requests** is a simple, yet elegant, HTTP library.\n\n```python\n>>> import requests\n>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))\n>>> r.status_code\n200\n>>> r.headers['content-type']\n'application/json; charset=utf8'\n>>> r.encoding\n'utf-8'\n>>> r.text\n'{\"authenticated\": true, ...'\n>>> r.json()\n{'authenticated': True, ...}\n```\n\nRequests allows you to send HTTP/1.1 requests extremely easily. There\u2019s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data \u2014 but nowadays, just use the `json` method!\n\nRequests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`\u2014 according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. 
You may certainly put your trust in this code.\n\n[![Downloads](https://static.pepy.tech/badge/requests/month)](https://pepy.tech/project/requests)\n[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)\n[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)\n\n## Installing Requests and Supported Versions\n\nRequests is available on PyPI:\n\n```console\n$ python -m pip install requests\n```\n\nRequests officially supports Python 3.8+.\n\n## Supported Features & Best\u2013Practices\n\nRequests is ready for the demands of building robust and reliable HTTP\u2013speaking applications, for the needs of today.\n\n- Keep-Alive & Connection Pooling\n- International Domains and URLs\n- Sessions with Cookie Persistence\n- Browser-style TLS/SSL Verification\n- Basic & Digest Authentication\n- Familiar `dict`\u2013like Cookies\n- Automatic Content Decompression and Decoding\n- Multi-part File Uploads\n- SOCKS Proxy Support\n- Connection Timeouts\n- Streaming Downloads\n- Automatic honoring of `.netrc`\n- Chunked HTTP Requests\n\n## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)\n\n[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io)\n\n## Cloning the repository\n\nWhen cloning the Requests repository, you may need to add the `-c\nfetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see\n[this issue](https://github.com/psf/requests/issues/2690) for more background):\n\n```shell\ngit clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git\n```\n\nYou can also apply this setting to your global Git config:\n\n```shell\ngit config --global fetch.fsck.badTimezone ignore\n```\n\n---\n\n[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf)", - "release_date": "2025-06-09T16:43:05", + "description": "Python HTTP for Humans.\n# Requests\n\n**Requests** is a simple, yet elegant, HTTP library.\n\n```python\n>>> import requests\n>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))\n>>> r.status_code\n200\n>>> r.headers['content-type']\n'application/json; charset=utf8'\n>>> r.encoding\n'utf-8'\n>>> r.text\n'{\"authenticated\": true, ...'\n>>> r.json()\n{'authenticated': True, ...}\n```\n\nRequests allows you to send HTTP/1.1 requests extremely easily. There\u2019s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data \u2014 but nowadays, just use the `json` method!\n\nRequests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`\u2014 according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. 
You may certainly put your trust in this code.\n\n[![Downloads](https://static.pepy.tech/badge/requests/month)](https://pepy.tech/project/requests)\n[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)\n[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)\n\n## Installing Requests and Supported Versions\n\nRequests is available on PyPI:\n\n```console\n$ python -m pip install requests\n```\n\nRequests officially supports Python 3.9+.\n\n## Supported Features & Best\u2013Practices\n\nRequests is ready for the demands of building robust and reliable HTTP\u2013speaking applications, for the needs of today.\n\n- Keep-Alive & Connection Pooling\n- International Domains and URLs\n- Sessions with Cookie Persistence\n- Browser-style TLS/SSL Verification\n- Basic & Digest Authentication\n- Familiar `dict`\u2013like Cookies\n- Automatic Content Decompression and Decoding\n- Multi-part File Uploads\n- SOCKS Proxy Support\n- Connection Timeouts\n- Streaming Downloads\n- Automatic honoring of `.netrc`\n- Chunked HTTP Requests\n\n## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)\n\n[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io)\n\n## Cloning the repository\n\nWhen cloning the Requests repository, you may need to add the `-c\nfetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit timestamp (see\n[this issue](https://github.com/psf/requests/issues/2690) for more background):\n\n```shell\ngit clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git\n```\n\nYou can also apply this setting to your global Git config:\n\n```shell\ngit config --global fetch.fsck.badTimezone ignore\n```\n\n---\n\n[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf)", + "release_date": "2025-08-18T20:46:00", "parties": [ { "type": "person", @@ -1066,7 +1066,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", @@ -1074,11 +1074,11 @@ "Topic :: Software Development :: Libraries" ], "homepage_url": "https://requests.readthedocs.io", - "download_url": "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", - "size": 64847, + "download_url": "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", + "size": 64738, "sha1": null, - "md5": "fa8fa331f951fbc5e62f3d3e683a77a4", - "sha256": "27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", + "md5": "bd126794a95616a0da6192b288f9bb88", + "sha256": "2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/psf/requests", @@ -1098,9 +1098,9 @@ "dependencies": [], "repository_homepage_url": null, 
"repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/requests/2.32.4/json", + "api_data_url": "https://pypi.org/pypi/requests/2.32.5/json", "datasource_id": null, - "purl": "pkg:pypi/requests@2.32.4" + "purl": "pkg:pypi/requests@2.32.5" }, { "type": "pypi", @@ -1162,12 +1162,12 @@ "type": "pypi", "namespace": null, "name": "typing-extensions", - "version": "4.14.1", + "version": "4.15.0", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Backported and Experimental Type Hints for Python 3.9+\n# Typing Extensions\n\n[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)\n\n[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) \u2013\n[PyPI](https://pypi.org/project/typing-extensions/)\n\n## Overview\n\nThe `typing_extensions` module serves two related purposes:\n\n- Enable use of new type system features on older Python versions. For example,\n `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows\n users on previous Python versions to use it too.\n- Enable experimentation with new type system PEPs before they are accepted and\n added to the `typing` module.\n\n`typing_extensions` is treated specially by static type checkers such as\nmypy and pyright. Objects defined in `typing_extensions` are treated the same\nway as equivalent forms in `typing`.\n\n`typing_extensions` uses\n[Semantic Versioning](https://semver.org/). The\nmajor version will be incremented only for backwards-incompatible changes.\nTherefore, it's safe to depend\non `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`,\nwhere `x.y` is the first version that includes all features you need.\n\n## Included items\n\nSee [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a\ncomplete listing of module contents.\n\n## Contributing\n\nSee [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)\nfor how to contribute to `typing_extensions`.", - "release_date": "2025-07-04T13:28:32", + "description": "Backported and Experimental Type Hints for Python 3.9+\n# Typing Extensions\n\n[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)\n\n[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) \u2013\n[PyPI](https://pypi.org/project/typing-extensions/)\n\n## Overview\n\nThe `typing_extensions` module serves two related purposes:\n\n- Enable use of new type system features on older Python versions. For example,\n `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows\n users on previous Python versions to use it too.\n- Enable experimentation with new type system PEPs before they are accepted and\n added to the `typing` module.\n\n`typing_extensions` is treated specially by static type checkers such as\nmypy and pyright. Objects defined in `typing_extensions` are treated the same\nway as equivalent forms in `typing`.\n\n`typing_extensions` uses\n[Semantic Versioning](https://semver.org/). The\nmajor version will be incremented only for backwards-incompatible changes.\nTherefore, it's safe to depend\non `typing_extensions` like this: `typing_extensions ~=x.y`,\nwhere `x.y` is the first version that includes all features you need.\n[This](https://packaging.python.org/en/latest/specifications/version-specifiers/#compatible-release)\nis equivalent to `typing_extensions >=x.y, <(x+1)`. 
Do not depend on `~= x.y.z`\nunless you really know what you're doing; that defeats the purpose of\nsemantic versioning.\n\n## Included items\n\nSee [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a\ncomplete listing of module contents.\n\n## Contributing\n\nSee [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)\nfor how to contribute to `typing_extensions`.", + "release_date": "2025-08-25T13:49:24", "parties": [ { "type": "person", @@ -1205,11 +1205,11 @@ "Topic :: Software Development" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", - "size": 43906, + "download_url": "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", + "size": 44614, "sha1": null, - "md5": "86905389dfed18c11e510c9e23147fcb", - "sha256": "d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", + "md5": "1394f56d85d87540f7907680572797e1", + "sha256": "f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", "sha512": null, "bug_tracking_url": "https://github.com/python/typing_extensions/issues", "code_view_url": null, @@ -1224,9 +1224,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/typing-extensions/4.14.1/json", + "api_data_url": "https://pypi.org/pypi/typing-extensions/4.15.0/json", "datasource_id": null, - "purl": "pkg:pypi/typing-extensions@4.14.1" + "purl": "pkg:pypi/typing-extensions@4.15.0" }, { "type": "pypi", @@ -1306,11 +1306,11 @@ ], "resolved_dependencies_graph": [ { - "package": "pkg:pypi/azure-core@1.35.0", + "package": "pkg:pypi/azure-core@1.35.1", "dependencies": [ - "pkg:pypi/requests@2.32.4", + "pkg:pypi/requests@2.32.5", "pkg:pypi/six@1.17.0", - "pkg:pypi/typing-extensions@4.14.1" + "pkg:pypi/typing-extensions@4.15.0" ] }, { @@ -1320,36 +1320,37 @@ ] }, { - "package": "pkg:pypi/azure-storage-blob@12.25.1", + "package": "pkg:pypi/azure-storage-blob@12.26.0", "dependencies": [ - "pkg:pypi/azure-core@1.35.0", - "pkg:pypi/cryptography@45.0.5", + "pkg:pypi/azure-core@1.35.1", + "pkg:pypi/cryptography@46.0.1", "pkg:pypi/isodate@0.7.2", - "pkg:pypi/typing-extensions@4.14.1" + "pkg:pypi/typing-extensions@4.15.0" ] }, { - "package": "pkg:pypi/certifi@2025.7.14", + "package": "pkg:pypi/certifi@2025.8.3", "dependencies": [] }, { - "package": "pkg:pypi/cffi@1.17.1", + "package": "pkg:pypi/cffi@2.0.0", "dependencies": [ - "pkg:pypi/pycparser@2.22" + "pkg:pypi/pycparser@2.23" ] }, { - "package": "pkg:pypi/charset-normalizer@3.4.2", + "package": "pkg:pypi/charset-normalizer@3.4.3", "dependencies": [] }, { - "package": "pkg:pypi/click@8.2.1", + "package": "pkg:pypi/click@8.2.2", "dependencies": [] }, { - "package": "pkg:pypi/cryptography@45.0.5", + "package": "pkg:pypi/cryptography@46.0.1", "dependencies": [ - "pkg:pypi/cffi@1.17.1" + "pkg:pypi/cffi@2.0.0", + "pkg:pypi/typing-extensions@4.15.0" ] }, { @@ -1363,11 +1364,11 @@ { "package": "pkg:pypi/msrest@0.7.1", "dependencies": [ - "pkg:pypi/azure-core@1.35.0", - "pkg:pypi/certifi@2025.7.14", + "pkg:pypi/azure-core@1.35.1", + "pkg:pypi/certifi@2025.8.3", "pkg:pypi/isodate@0.7.2", "pkg:pypi/requests-oauthlib@2.0.0", - "pkg:pypi/requests@2.32.4" + "pkg:pypi/requests@2.32.5" ] }, { @@ -1375,21 +1376,21 @@ "dependencies": [] }, { - "package": 
"pkg:pypi/pycparser@2.22", + "package": "pkg:pypi/pycparser@2.23", "dependencies": [] }, { "package": "pkg:pypi/requests-oauthlib@2.0.0", "dependencies": [ "pkg:pypi/oauthlib@3.3.1", - "pkg:pypi/requests@2.32.4" + "pkg:pypi/requests@2.32.5" ] }, { - "package": "pkg:pypi/requests@2.32.4", + "package": "pkg:pypi/requests@2.32.5", "dependencies": [ - "pkg:pypi/certifi@2025.7.14", - "pkg:pypi/charset-normalizer@3.4.2", + "pkg:pypi/certifi@2025.8.3", + "pkg:pypi/charset-normalizer@3.4.3", "pkg:pypi/idna@3.10", "pkg:pypi/urllib3@2.5.0" ] @@ -1399,7 +1400,7 @@ "dependencies": [] }, { - "package": "pkg:pypi/typing-extensions@4.14.1", + "package": "pkg:pypi/typing-extensions@4.15.0", "dependencies": [] }, { diff --git a/tests/data/azure-devops.req-313-expected.json b/tests/data/azure-devops.req-313-expected.json index 4c222303..d4d8d74f 100644 --- a/tests/data/azure-devops.req-313-expected.json +++ b/tests/data/azure-devops.req-313-expected.json @@ -126,12 +126,12 @@ "type": "pypi", "namespace": null, "name": "azure-core", - "version": "1.35.0", + "version": "1.35.1", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Microsoft Azure Core Library for Python\n# Azure Core shared client library for Python\n\nAzure core provides shared exceptions and modules for Python SDK client libraries.\nThese libraries follow the [Azure SDK Design Guidelines for Python](https://azure.github.io/azure-sdk/python/guidelines/index.html) .\n\nIf you are a client library developer, please reference [client library developer reference](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md) for more information.\n\n[Source code](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/)\n| [Package (Pypi)][package]\n| [Package (Conda)](https://anaconda.org/microsoft/azure-core/)\n| [API reference documentation](https://learn.microsoft.com/python/api/overview/azure/core-readme)\n\n## Getting started\n\nTypically, you will not need to install azure core;\nit will be installed when you install one of the client libraries using it.\nIn case you want to install it explicitly (to implement your own client library, for example),\nyou can find it [here](https://pypi.org/project/azure-core/).\n\n## Key concepts\n\n### Azure Core Library Exceptions\n\n#### AzureError\n\nAzureError is the base exception for all errors.\n\n```python\nclass AzureError(Exception):\n def __init__(self, message, *args, **kwargs):\n self.inner_exception = kwargs.get(\"error\")\n self.exc_type, self.exc_value, self.exc_traceback = sys.exc_info()\n self.exc_type = self.exc_type.__name__ if self.exc_type else type(self.inner_exception)\n self.exc_msg = \"{}, {}: {}\".format(message, self.exc_type, self.exc_value) # type: ignore\n self.message = str(message)\n self.continuation_token = kwargs.get(\"continuation_token\")\n super(AzureError, self).__init__(self.message, *args)\n```\n\n*message* is any message (str) to be associated with the exception.\n\n*args* are any additional args to be included with exception.\n\n*kwargs* are keyword arguments to include with the exception. Use the keyword *error* to pass in an internal exception and *continuation_token* for a token reference to continue an incomplete operation.\n\n**The following exceptions inherit from AzureError:**\n\n#### ServiceRequestError\n\nAn error occurred while attempt to make a request to the service. 
No request was sent.\n\n#### ServiceResponseError\n\nThe request was sent, but the client failed to understand the response.\nThe connection may have timed out. These errors can be retried for idempotent or safe operations.\n\n#### HttpResponseError\n\nA request was made, and a non-success status code was received from the service.\n\n```python\nclass HttpResponseError(AzureError):\n def __init__(self, message=None, response=None, **kwargs):\n self.reason = None\n self.response = response\n if response:\n self.reason = response.reason\n self.status_code = response.status_code\n self.error = self._parse_odata_body(ODataV4Format, response) # type: Optional[ODataV4Format]\n if self.error:\n message = str(self.error)\n else:\n message = message or \"Operation returned an invalid status '{}'\".format(\n self.reason\n )\n\n super(HttpResponseError, self).__init__(message=message, **kwargs)\n```\n\n*message* is the HTTP response error message (optional)\n\n*response* is the HTTP response (optional).\n\n*kwargs* are keyword arguments to include with the exception.\n\n**The following exceptions inherit from HttpResponseError:**\n\n#### DecodeError\n\nAn error raised during response de-serialization.\n\n#### IncompleteReadError\n\nAn error raised if peer closes the connection before we have received the complete message body.\n\n#### ResourceExistsError\n\nAn error response with status code 4xx. This will not be raised directly by the Azure core pipeline.\n\n#### ResourceNotFoundError\n\nAn error response, typically triggered by a 412 response (for update) or 404 (for get/post).\n\n#### ResourceModifiedError\n\nAn error response with status code 4xx, typically 412 Conflict. This will not be raised directly by the Azure core pipeline.\n\n#### ResourceNotModifiedError\n\nAn error response with status code 304. This will not be raised directly by the Azure core pipeline.\n\n#### ClientAuthenticationError\n\nAn error response with status code 4xx. This will not be raised directly by the Azure core pipeline.\n\n#### TooManyRedirectsError\n\nAn error raised when the maximum number of redirect attempts is reached. The maximum amount of redirects can be configured in the RedirectPolicy.\n\n```python\nclass TooManyRedirectsError(HttpResponseError):\n def __init__(self, history, *args, **kwargs):\n self.history = history\n message = \"Reached maximum redirect attempts.\"\n super(TooManyRedirectsError, self).__init__(message, *args, **kwargs)\n```\n\n*history* is used to document the requests/responses that resulted in redirected requests.\n\n*args* are any additional args to be included with exception.\n\n*kwargs* are keyword arguments to include with the exception.\n\n#### StreamConsumedError\n\nAn error thrown if you try to access the stream of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once\nthe response stream has been consumed.\n\n#### StreamClosedError\n\nAn error thrown if you try to access the stream of the `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once\nthe response stream has been closed.\n\n#### ResponseNotReadError\n\nAn error thrown if you try to access the `content` of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` before\nreading in the response's bytes first.\n\n### Configurations\n\nWhen calling the methods, some properties can be configured by passing in as kwargs arguments.\n\n| Parameters | Description |\n| --- | --- |\n| headers | The HTTP Request headers. 
|\n| request_id | The request id to be added into header. |\n| user_agent | If specified, this will be added in front of the user agent string. |\n| logging_enable| Use to enable per operation. Defaults to `False`. |\n| logger | If specified, it will be used to log information. |\n| response_encoding | The encoding to use if known for this service (will disable auto-detection). |\n| raw_request_hook | Callback function. Will be invoked on request. |\n| raw_response_hook | Callback function. Will be invoked on response. |\n| network_span_namer | A callable to customize the span name. |\n| tracing_attributes | Attributes to set on all created spans. |\n| permit_redirects | Whether the client allows redirects. Defaults to `True`. |\n| redirect_max | The maximum allowed redirects. Defaults to `30`. |\n| retry_total | Total number of retries to allow. Takes precedence over other counts. Default value is `10`. |\n| retry_connect | How many connection-related errors to retry on. These are errors raised before the request is sent to the remote server, which we assume has not triggered the server to process the request. Default value is `3`. |\n| retry_read | How many times to retry on read errors. These errors are raised after the request was sent to the server, so the request may have side-effects. Default value is `3`. |\n| retry_status | How many times to retry on bad status codes. Default value is `3`. |\n| retry_backoff_factor | A backoff factor to apply between attempts after the second try (most errors are resolved immediately by a second try without a delay). Retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))` seconds. If the backoff_factor is 0.1, then the retry will sleep for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is `0.8`. |\n| retry_backoff_max | The maximum back off time. Default value is `120` seconds (2 minutes). |\n| retry_mode | Fixed or exponential delay between attempts, default is `Exponential`. |\n| timeout | Timeout setting for the operation in seconds, default is `604800`s (7 days). |\n| connection_timeout | A single float in seconds for the connection timeout. Defaults to `300` seconds. |\n| read_timeout | A single float in seconds for the read timeout. Defaults to `300` seconds. |\n| connection_verify | SSL certificate verification. Enabled by default. Set to False to disable, alternatively can be set to the path to a CA_BUNDLE file or directory with certificates of trusted CAs. |\n| connection_cert | Client-side certificates. You can specify a local cert to use as client side certificate, as a single file (containing the private key and the certificate) or as a tuple of both files' paths. |\n| proxies | Dictionary mapping protocol or protocol and hostname to the URL of the proxy. |\n| cookies | Dict or CookieJar object to send with the `Request`. |\n| connection_data_block_size | The block size of data sent over the connection. Defaults to `4096` bytes. |\n\n### Async transport\n\nThe async transport is designed to be opt-in. [AioHttp](https://pypi.org/project/aiohttp/) is one of the supported implementations of async transport. It is not installed by default. You need to install it separately.\n\n### Shared modules\n\n#### MatchConditions\n\nMatchConditions is an enum to describe match conditions.\n\n```python\nclass MatchConditions(Enum):\n Unconditionally = 1 # Matches any condition\n IfNotModified = 2 # If the target object is not modified. 
Usually it maps to etag=\n IfModified = 3 # Only if the target object is modified. Usually it maps to etag!=\n IfPresent = 4 # If the target object exists. Usually it maps to etag='*'\n IfMissing = 5 # If the target object does not exist. Usually it maps to etag!='*'\n```\n\n#### CaseInsensitiveEnumMeta\n\nA metaclass to support case-insensitive enums.\n\n```python\nfrom enum import Enum\n\nfrom azure.core import CaseInsensitiveEnumMeta\n\nclass MyCustomEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):\n FOO = 'foo'\n BAR = 'bar'\n```\n\n#### Null Sentinel Value\n\nA falsy sentinel object which is supposed to be used to specify attributes\nwith no data. This gets serialized to `null` on the wire.\n\n```python\nfrom azure.core.serialization import NULL\n\nassert bool(NULL) is False\n\nfoo = Foo(\n attr=NULL\n)\n```\n\n## Contributing\n\nThis project welcomes contributions and suggestions. Most contributions require\nyou to agree to a Contributor License Agreement (CLA) declaring that you have\nthe right to, and actually do, grant us the rights to use your contribution.\nFor details, visit [https://cla.microsoft.com](https://cla.microsoft.com).\n\nWhen you submit a pull request, a CLA-bot will automatically determine whether\nyou need to provide a CLA and decorate the PR appropriately (e.g., label,\ncomment). Simply follow the instructions provided by the bot. You will only\nneed to do this once across all repos using our CLA.\n\nThis project has adopted the\n[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).\nFor more information, see the\n[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)\nor contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any\nadditional questions or comments.\n\n\n[package]: https://pypi.org/project/azure-core/\n\n\n# Release History\n\n## 1.35.0 (2025-07-02)\n\n### Features Added\n\n- Added a `start_time` keyword argument to the `start_span` and `start_as_current_span` methods in the `OpenTelemetryTracer` class. This allows users to specify a custom start time for created spans. #41106\n- Added a `context` keyword argument to the `start_span` and `start_as_current_span` methods in the `OpenTelemetryTracer` class. This allows users to specify a custom parent context for created spans. #41511\n- Added method `as_attribute_dict` to `azure.core.serialization` for backcompat migration purposes. Will return a generated model as a dictionary where the keys are in attribute syntax.\n- Added `is_generated_model` method to `azure.core.serialization`. Returns whether a given input is a model from one of our generated sdks. #41445\n- Added `attribute_list` method to `azure.core.serialization`. Returns all of the attributes of a given model from one of our generated sdks. #41571\n\n### Other Changes\n\n- A timeout error when using the `aiohttp` transport (the default for async SDKs) will now be raised as a `azure.core.exceptions.ServiceResponseTimeoutError`, a subtype of the previously raised `ServiceResponseError`.\n- When using with `aiohttp` 3.10 or later, a connection timeout error will now be raised as a `azure.core.exceptions.ServiceRequestTimeoutError`, which can be retried.\n- The default implementation of `on_challenge` in `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` will now cache the retrieved token. #41857\n\n## 1.34.0 (2025-05-01)\n\n### Features Added\n\n- Added a `set_span_error_status` method to the `OpenTelemetryTracer` class. 
This method allows users to set the status of a span to `ERROR` after it has been created. #40703\n\n### Other Changes\n\n- Python 3.8 is no longer supported. Please use Python version 3.9 or later.\n\n## 1.33.0 (2025-04-03)\n\n### Features Added\n\n- Added native OpenTelemetry tracing to Azure Core which enables users to use OpenTelemetry to trace Azure SDK operations without needing to install a plugin. #39563\n - To enable native OpenTelemetry tracing, users need to:\n 1. Have `opentelemetry-api` installed.\n 2. Ensure that `settings.tracing_implementation` is not set.\n 3. Ensure that `settings.tracing_enabled` is set to `True`.\n - If `setting.tracing_implementation` is set, the tracing plugin will be used instead of the native tracing.\n - If `settings.tracing_enabled` is set to `False`, tracing will be disabled.\n - The `OpenTelemetryTracer` class was added to the `azure.core.tracing.opentelemetry` module. This is a wrapper around the OpenTelemetry tracer that is used to create spans for Azure SDK operations.\n - Added a `get_tracer` method to the new `azure.core.instrumentation` module. This method returns an instance of the `OpenTelemetryTracer` class if OpenTelemetry is available.\n - A `TracingOptions` TypedDict class was added to define the options that SDK users can use to configure tracing per-operation. These options include the ability to enable or disable tracing and set additional attributes on spans.\n - Example usage: `client.method(tracing_options={\"enabled\": True, \"attributes\": {\"foo\": \"bar\"}})`\n - The `DistributedTracingPolicy` and `distributed_trace`/`distributed_trace_async` decorators now uses the OpenTelemetry tracer if it is available and native tracing is enabled.\n - SDK clients can define an `_instrumentation_config` class variable to configure the OpenTelemetry tracer used in method span creation. Possible configuration options are `library_name`, `library_version`, `schema_url`, and `attributes`.\n - `DistributedTracingPolicy` now accepts a `instrumentation_config` keyword argument to configure the OpenTelemetry tracer used in HTTP span creation.\n\n### Breaking Changes\n\n- Removed automatic tracing enablement for the OpenTelemetry plugin if `opentelemetry` was imported. To enable tracing with the plugin, please import `azure.core.settings.settings` and set `settings.tracing_implementation` to `\"opentelemetry\"`. #39563\n- In `DistributedTracingPolicy`, the default span name is now just the HTTP method (e.g., \"GET\", \"POST\") and no longer includes the URL path. This change was made to converge with the OpenTelemetry HTTP semantic conventions. The full URL is still included in the span attributes.\n- Renamed span attributes in `DistributedTracingPolicy`:\n - \"x-ms-client-request-id\" is now \"az.client_request_id\"\n - \"x-ms-request-id\" is now \"az.service_request_id\"\n\n### Bugs Fixed\n\n- Fixed an issue where the `traceparent` header was not being set correctly in the `DistributedTracingPolicy`. The `traceparent` header will now set based on the context of the HTTP client span. #40074\n\n### Other Changes\n\n- Added `opentelemetry-api` as an optional dependency for tracing. This can be installed with `pip install azure-core[tracing]`. 
#39563\n\n## 1.32.0 (2024-10-31)\n\n### Features Added\n\n- Added a default implementation to handle token challenges in `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy`.\n\n### Bugs Fixed\n\n- Fixed an issue where the `tracing_attributes` keyword argument wasn't being handled at the request/method level. #38164\n\n### Other Changes\n\n- Log \"x-vss-e2eid\" and \"x-msedge-ref\" headers in `HttpLoggingPolicy`.\n\n## 1.31.0 (2024-09-12)\n\n### Features Added\n\n- Added azure.core.AzureClouds enum to represent the different Azure clouds.\n- Added two new credential protocol classes, `SupportsTokenInfo` and `AsyncSupportsTokenInfo`, to offer more extensibility in supporting various token acquisition scenarios. #36565\n - Each new protocol class defines a `get_token_info` method that returns an `AccessTokenInfo` object.\n- Added a new `TokenRequestOptions` class, which is a `TypedDict` with optional parameters, that can be used to define options for token requests through the `get_token_info` method. #36565\n- Added a new `AccessTokenInfo` class, which is returned by `get_token_info` implementations. This class contains the token, its expiration time, and optional additional information like when a token should be refreshed. #36565\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` now first check if a credential has the `get_token_info` method defined. If so, the `get_token_info` method is used to acquire a token. Otherwise, the `get_token` method is used. #36565\n - These policies now also check the `refresh_on` attribute when determining if a new token request should be made.\n\n### Other Changes\n\n- The Azure Core OpenTelemetry tracing plugin will now be the preferred tracing plugin over the OpenCensus plugin. If both plugins are installed and `opentelemetry` is imported, then OpenTelemetry will be used to trace Azure SDK operations. #35050\n\n## 1.30.2 (2024-06-06)\n\n### Features Added\n\n- Tracing: `DistributedTracingPolicy` will now set an attribute, `http.request.resend_count`, on HTTP spans for resent requests to indicate the resend attempt number. #35069\n\n### Bugs Fixed\n\n- Raise correct exception if transport is used while already closed #35559\n\n### Other Changes\n\n- HTTP tracing spans will now include an `error.type` attribute if an error status code is returned. #34619\n- Minimum required Python version is now 3.8\n\n## 1.30.1 (2024-02-29)\n\n### Other Changes\n\n- Accept float for `retry_after` header. #34203\n\n## 1.30.0 (2024-02-01)\n\n### Features Added\n\n- Support tuple input for file values to `azure.core.rest.HttpRequest` #33948\n- Support tuple input to `files` with duplicate field names `azure.core.rest.HttpRequest` #34021\n\n## 1.29.7 (2024-01-18)\n\n### Other Changes\n\n- Removed dependency on `anyio`. #33282\n\n## 1.29.6 (2023-12-14)\n\n### Bugs Fixed\n\n- Adjusted `AsyncBearerTokenCredentialPolicy` to work properly with `trio` concurrency mechanisms. ([#33307](https://github.com/Azure/azure-sdk-for-python/pull/33307))\n\n### Other Changes\n\n- Added dependency on `anyio` >=3.0,<5.0\n- Bumped minimum dependency on `requests` to 2.21.0.\n\n## 1.29.5 (2023-10-19)\n\n### Bugs Fixed\n\n- Fixed an issue with `multipart/form-data` in the async transport where `data` was not getting encoded into the request body. #32473\n\n### Other Changes\n\n- Use ssl context from aiohttp by default.\n\n## 1.29.4 (2023-09-07)\n\n### Bugs Fixed\n\n- Fixed the issue that some urls trigger an infinite loop. 
#31346\n- Fixed issue where IndexError was raised if multipart responses did not match the number of requests. #31471\n- Fixed issue unbound variable exception if dict is invalid in CloudEvent.from_dict. #31835\n- Fixed issue asyncBearerTokenCredentialPolicy is not backward compatible with SansIOHTTPPolicy. #31836\n- Fixed issue mypy complains with new version of azure-core. #31564\n\n## 1.29.3 (2023-08-22)\n\n### Bugs Fixed\n\n- Typing fix: `message` cannot be `None` in `AzureError`. #31564\n\n## 1.29.2 (2023-08-14)\n\n### Bugs Fixed\n\n- Added a default implementation for `AsyncTokenCredential.__aexit__()` #31573\n\n### Other Changes\n\n- Bumped `typing-extensions` version to 4.6.0.\n\n## 1.29.1 (2023-08-09)\n\n### Bugs Fixed\n\n- Not pass `enabled_cae` unless it is explicitly enabled.\n\n## 1.29.0 (2023-08-03)\n\n### Features Added\n\n- A keyword argument `enable_cae` was added to the `get_token` method of the `TokenCredential` protocol. #31012\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` now accept `enable_cae` keyword arguments in their constructors. This is used in determining if [Continuous Access Evaluation (CAE)](https://learn.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation) should be enabled for each `get_token` request. #31012\n\n## 1.28.0 (2023-07-06)\n\n### Features Added\n\n- Added header name parameter to `RequestIdPolicy`. #30772\n- Added `SensitiveHeaderCleanupPolicy` that cleans up sensitive headers if a redirect happens and the new destination is in another domain. #28349\n\n### Other Changes\n\n- Catch aiohttp errors and translate them into azure-core errors.\n\n## 1.27.1 (2023-06-13)\n\n### Bugs Fixed\n\n- Fix url building for some complex query parameters scenarios #30707\n\n## 1.27.0 (2023-06-01)\n\n### Features Added\n\n- Added support to use sync credentials in `AsyncBearerTokenCredentialPolicy`. #30381\n- Added \"prefix\" parameter to AzureKeyCredentialPolicy #29901\n\n### Bugs Fixed\n\n- Improve error message when providing the wrong credential type for AzureKeyCredential #30380\n\n## 1.26.4 (2023-04-06)\n\n### Features Added\n\n- Updated settings to include OpenTelemetry as a tracer provider. #29095\n\n### Other Changes\n\n- Improved typing\n\n## 1.26.3 (2023-02-02)\n\n### Bugs Fixed\n\n- Fixed deflate decompression for aiohttp #28483\n\n## 1.26.2 (2023-01-05)\n\n### Bugs Fixed\n\n- Fix 'ClientSession' object has no attribute 'auto_decompress' (thanks to @mghextreme for the contribution)\n\n### Other Changes\n\n- Add \"x-ms-error-code\" as secure header to log\n- Rename \"DEFAULT_HEADERS_WHITELIST\" to \"DEFAULT_HEADERS_ALLOWLIST\". Added a backward compatible alias.\n\n## 1.26.1 (2022-11-03)\n\n### Other Changes\n\n- Added example of RequestsTransport with custom session. (thanks to @inirudebwoy for the contribution) #26768\n- Added Python 3.11 support.\n\n## 1.26.0 (2022-10-06)\n\n### Other Changes\n\n- LRO polling will not wait anymore before doing the first status check #26376\n- Added extra dependency for [aio]. pip install azure-core[aio] installs aiohttp too.\n\n## 1.25.1 (2022-09-01)\n\n### Bugs Fixed\n\n- Added @runtime_checkable to `TokenCredential` protocol definitions #25187\n\n## 1.25.0 (2022-08-04)\n\nAzure-core is supported on Python 3.7 or later. 
For more details, please read our page on [Azure SDK for Python version support policy](https://github.com/Azure/azure-sdk-for-python/wiki/Azure-SDKs-Python-version-support-policy).\n\n### Features Added\n\n- Added `CaseInsensitiveDict` implementation in `azure.core.utils` removing dependency on `requests` and `aiohttp`\n\n## 1.24.2 (2022-06-30)\n\n### Bugs Fixed\n\n- Fixed the bug that azure-core could not be imported under Python 3.11.0b3 #24928\n- `ContentDecodePolicy` can now correctly deserialize more JSON bodies with different mime types #22410\n\n## 1.24.1 (2022-06-01)\n\n### Bugs Fixed\n\n- Declare method level span as INTERNAL by default #24492\n- Fixed type hints for `azure.core.paging.ItemPaged` #24548\n\n## 1.24.0 (2022-05-06)\n\n### Features Added\n\n- Add `SerializationError` and `DeserializationError` in `azure.core.exceptions` for errors raised during serialization / deserialization #24312\n\n## 1.23.1 (2022-03-31)\n\n### Bugs Fixed\n\n- Allow stream inputs to the `content` kwarg of `azure.core.rest.HttpRequest` from objects with a `read` method #23578\n\n## 1.23.0 (2022-03-03)\n\n### Features Added\n\n- Improve intellisense type hinting for service client methods. #22891\n\n- Add a case insensitive dict `case_insensitive_dict` in `azure.core.utils`. #23206\n\n### Bugs Fixed\n\n- Use \"\\n\" rather than \"/n\" for new line in log. #23261\n\n### Other Changes\n\n- Log \"WWW-Authenticate\" header in `HttpLoggingPolicy` #22990\n- Added dependency on `typing-extensions` >= 4.0.1\n\n## 1.22.1 (2022-02-09)\n\n### Bugs Fixed\n\n- Limiting `final-state-via` scope to POST until consuming SDKs has been fixed to use this option properly on PUT. #22989\n\n## 1.22.0 (2022-02-03)\n_[**This version is deprecated.**]_\n\n### Features Added\n\n- Add support for `final-state-via` LRO option in core. #22713\n\n### Bugs Fixed\n\n- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #22302\n- Raise `AttributeError` when calling azure.core.pipeline.transport.\\_\\_bases__ #22469\n\n### Other Changes\n\n- Python 2.7 is no longer supported. Please use Python version 3.6 or later.\n\n## 1.21.1 (2021-12-06)\n\n### Other Changes\n\n- Revert change in str method #22023\n\n## 1.21.0 (2021-12-02)\n\n### Breaking Changes\n\n- Sync stream downloading now raises `azure.core.exceptions.DecodeError` rather than `requests.exceptions.ContentDecodingError`\n\n### Bugs Fixed\n\n- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #21800\n\n## 1.20.1 (2021-11-08)\n\n### Bugs Fixed\n\n- Correctly set response's content to decompressed body when users are using aiohttp transport with decompression headers #21620\n\n## 1.20.0 (2021-11-04)\n\n### Features Added\n\n- GA `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in\nrequests and sends them through our pipelines.\n- GA `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.\n- GA errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. 
These errors\nare thrown if you mishandle streamed responses from the `azure.core.rest` module\n- add kwargs to the methods for `iter_raw` and `iter_bytes` #21529\n- no longer raise JSON errors if users pass in file descriptors of JSON to the `json` kwarg in `HttpRequest` #21504\n- Added new error type `IncompleteReadError` which is raised if peer closes the connection before we have received the complete message body.\n\n### Breaking Changes\n\n- SansIOHTTPPolicy.on_exception returns None instead of bool.\n\n### Bugs Fixed\n\n- The `Content-Length` header in a http response is strictly checked against the actual number of bytes in the body,\n rather than silently truncating data in case the underlying tcp connection is closed prematurely.\n (thanks to @jochen-ott-by for the contribution) #20412\n- UnboundLocalError when SansIOHTTPPolicy handles an exception #15222\n- Add default content type header of `text/plain` and content length header for users who pass unicode strings to the `content` kwarg of `HttpRequest` in 2.7 #21550\n\n## 1.19.1 (2021-11-01)\n\n### Bugs Fixed\n\n- respect text encoding specified in argument (thanks to @ryohji for the contribution) #20796\n- Fix \"coroutine x.read() was never awaited\" warning from `ContentDecodePolicy` #21318\n- fix type check for `data` input to `azure.core.rest` for python 2.7 users #21341\n- use `charset_normalizer` if `chardet` is not installed to migrate aiohttp 3.8.0 changes.\n\n### Other Changes\n\n- Refactor AzureJSONEncoder (thanks to @Codejune for the contribution) #21028\n\n## 1.19.0 (2021-09-30)\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` are now abstract base classes. They should not be initialized directly, instead\nyour transport responses should inherit from them and implement them.\n- The properties of the `azure.core.rest` responses are now all read-only\n\n- HttpLoggingPolicy integrates logs into one record #19925\n\n## 1.18.0 (2021-09-02)\n\n### Features Added\n\n- `azure.core.serialization.AzureJSONEncoder` (introduced in 1.17.0) serializes `datetime.datetime` objects in ISO 8601 format, conforming to RFC 3339's specification. #20190\n- We now use `azure.core.serialization.AzureJSONEncoder` to serialize `json` input to `azure.core.rest.HttpRequest`.\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- The `text` property on `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` has changed to a method, which also takes\nan `encoding` parameter.\n- Removed `iter_text` and `iter_lines` from `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse`\n\n### Bugs Fixed\n\n- The behaviour of the headers returned in `azure.core.rest` responses now aligns across sync and async. Items can now be checked case-insensitively and without raising an error for format.\n\n## 1.17.0 (2021-08-05)\n\n### Features Added\n\n- Cut hard dependency on requests library\n- Added a `from_json` method which now accepts storage QueueMessage, eventhub's EventData or ServiceBusMessage or simply json bytes to return a `CloudEvent`\n\n### Fixed\n\n- Not override \"x-ms-client-request-id\" if it already exists in the header. #17757\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- `azure.core.rest` will not try to guess the `charset` anymore if it was impossible to extract it from `HttpResponse` analysis. 
This removes our dependency on `charset`.\n\n## 1.16.0 (2021-07-01)\n\n### Features Added\n\n- Add new ***provisional*** methods `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in\nrequests and sends them through our pipelines.\n- Add new ***provisional*** module `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.\n- Add new ***provisional*** errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. These errors\nare thrown if you mishandle streamed responses from the provisional `azure.core.rest` module\n\n### Fixed\n\n- Improved error message in the `from_dict` method of `CloudEvent` when a wrong schema is sent.\n\n## 1.15.0 (2021-06-04)\n\n### New Features\n\n- Added `BearerTokenCredentialPolicy.on_challenge` and `.authorize_request` to allow subclasses to optionally handle authentication challenges\n\n### Bug Fixes\n\n- Retry policies don't sleep after operations time out\n- The `from_dict` methhod in the `CloudEvent` can now convert a datetime string to datetime object when microsecond exceeds the python limitation\n\n## 1.14.0 (2021-05-13)\n\n### New Features\n\n- Added `azure.core.credentials.AzureNamedKeyCredential` credential #17548.\n- Added `decompress` parameter for `stream_download` method. If it is set to `False`, will not do decompression upon the stream. #17920\n\n## 1.13.0 (2021-04-02)\n\nAzure core requires Python 2.7 or Python 3.6+ since this release.\n\n### New Features\n\n- Added `azure.core.utils.parse_connection_string` function to parse connection strings across SDKs, with common validation and support for case insensitive keys.\n- Supported adding custom policies #16519\n- Added `~azure.core.tracing.Link` that should be used while passing `Links` to `AbstractSpan`.\n- `AbstractSpan` constructor can now take in additional keyword only args.\n\n### Bug fixes\n\n- Make NetworkTraceLoggingPolicy show the auth token in plain text. #14191\n- Fixed RetryPolicy overriding default connection timeout with an extreme value #17481\n\n## 1.12.0 (2021-03-08)\n\nThis version will be the last version to officially support Python 3.5, future versions will require Python 2.7 or Python 3.6+.\n\n### Features\n\n- Added `azure.core.messaging.CloudEvent` model that follows the cloud event spec.\n- Added `azure.core.serialization.NULL` sentinel value\n- Improve `repr`s for `HttpRequest` and `HttpResponse`s #16972\n\n### Bug Fixes\n\n- Disable retry in stream downloading. (thanks to @jochen-ott-by @hoffmann for the contribution) #16723\n\n## 1.11.0 (2021-02-08)\n\n### Features\n\n- Added `CaseInsensitiveEnumMeta` class for case-insensitive enums. #16316\n- Add `raise_for_status` method onto `HttpResponse`. Calling `response.raise_for_status()` on a response with an error code\nwill raise an `HttpResponseError`. Calling it on a good response will do nothing #16399\n\n### Bug Fixes\n\n- Update conn.conn_kw rather than overriding it when setting block size. (thanks for @jiasli for the contribution) #16587\n\n## 1.10.0 (2021-01-11)\n\n### Features\n\n- Added `AzureSasCredential` and its respective policy. 
#15946\n\n## 1.9.0 (2020-11-09)\n\n### Features\n\n- Add a `continuation_token` attribute to the base `AzureError` exception, and set this value for errors raised\n during paged or long-running operations.\n\n### Bug Fixes\n\n- Set retry_interval to 1 second instead of 1000 seconds (thanks **vbarbaresi** for contributing) #14357\n\n\n## 1.8.2 (2020-10-05)\n\n### Bug Fixes\n\n- Fixed bug to allow polling in the case of parameterized endpoints with relative polling urls #14097\n\n\n## 1.8.1 (2020-09-08)\n\n### Bug fixes\n\n- SAS credential replicated \"/\" fix #13159\n\n## 1.8.0 (2020-08-10)\n\n### Features\n\n- Support params as list for exploding parameters #12410\n\n\n## 1.7.0 (2020-07-06)\n\n### Bug fixes\n\n- `AzureKeyCredentialPolicy` will now accept (and ignore) passed in kwargs #11963\n- Better error messages if passed endpoint is incorrect #12106\n- Do not JSON encore a string if content type is \"text\" #12137\n\n### Features\n\n- Added `http_logging_policy` property on the `Configuration` object, allowing users to individually\nset the http logging policy of the config #12218\n\n## 1.6.0 (2020-06-03)\n\n### Bug fixes\n\n- Fixed deadlocks in AsyncBearerTokenCredentialPolicy #11543\n- Fix AttributeException in StreamDownloadGenerator #11462\n\n### Features\n\n- Added support for changesets as part of multipart message support #10485\n- Add AsyncLROPoller in azure.core.polling #10801\n- Add get_continuation_token/from_continuation_token/polling_method methods in pollers (sync and async) #10801\n- HttpResponse and PipelineContext objects are now pickable #10801\n\n## 1.5.0 (2020-05-04)\n\n### Features\n\n- Support \"x-ms-retry-after-ms\" in response header #10743\n- `link` and `link_from_headers` now accepts attributes #10765\n\n### Bug fixes\n\n- Not retry if the status code is less than 400 #10778\n- \"x-ms-request-id\" is not considered safe header for logging #10967\n\n## 1.4.0 (2020-04-06)\n\n### Features\n\n- Support a default error type in map_error #9773\n- Added `AzureKeyCredential` and its respective policy. 
#10509\n- Added `azure.core.polling.base_polling` module with a \"Microsoft One API\" polling implementation #10090\n Also contains the async version in `azure.core.polling.async_base_polling`\n- Support kwarg `enforce_https` to disable HTTPS check on authentication #9821\n- Support additional kwargs in `HttpRequest.set_multipart_mixed` that will be passed into pipeline context.\n\n## 1.3.0 (2020-03-09)\n\n### Bug fixes\n\n- Appended RequestIdPolicy to the default pipeline #9841\n- Rewind the body position in async_retry #10117\n\n### Features\n\n- Add raw_request_hook support in custom_hook_policy #9958\n- Add timeout support in retry_policy #10011\n- Add OdataV4 error format auto-parsing in all exceptions ('error' attribute) #9738\n\n## 1.2.2 (2020-02-10)\n\n### Bug fixes\n\n- Fixed a bug that sends None as request_id #9545\n- Enable mypy for customers #9572\n- Handle TypeError in deep copy #9620\n- Fix text/plain content-type in decoder #9589\n\n## 1.2.1 (2020-01-14)\n\n### Bug fixes\n\n- Fixed a regression in 1.2.0 that was incompatible with azure-keyvault-* 4.0.0\n[#9462](https://github.com/Azure/azure-sdk-for-python/issues/9462)\n\n\n## 1.2.0 (2020-01-14)\n\n### Features\n\n- Add user_agent & sdk_moniker kwargs in UserAgentPolicy init #9355\n- Support OPTIONS HTTP verb #9322\n- Add tracing_attributes to tracing decorator #9297\n- Support auto_request_id in RequestIdPolicy #9163\n- Support fixed retry #6419\n- Support \"retry-after-ms\" in response header #9240\n\n### Bug fixes\n\n- Removed `__enter__` and `__exit__` from async context managers #9313\n\n## 1.1.1 (2019-12-03)\n\n### Bug fixes\n\n- Bearer token authorization requires HTTPS\n- Rewind the body position in retry #8307\n\n## 1.1.0 (2019-11-25)\n\n### Features\n\n- New RequestIdPolicy #8437\n- Enable logging policy in default pipeline #8053\n- Normalize transport timeout. #8000\n Now we have:\n * 'connection_timeout' - a single float in seconds for the connection timeout. Default 5min\n * 'read_timeout' - a single float in seconds for the read timeout. Default 5min\n\n### Bug fixes\n\n- RequestHistory: deepcopy fails if request contains a stream #7732\n- Retry: retry raises error if response does not have http_response #8629\n- Client kwargs are now passed to DistributedTracingPolicy correctly #8051\n- NetworkLoggingPolicy now logs correctly all requests in case of retry #8262\n\n## 1.0.0 (2019-10-29)\n\n### Features\n\n- Tracing: DistributedTracingPolicy now accepts kwargs network_span_namer to change network span name #7773\n- Tracing: Implementation of AbstractSpan can now use the mixin HttpSpanMixin to get HTTP span update automatically #7773\n- Tracing: AbstractSpan contract \"change_context\" introduced #7773\n- Introduce new policy HttpLoggingPolicy #7988\n\n### Bug fixes\n\n- Fix AsyncioRequestsTransport if input stream is an async generator #7743\n- Fix form-data with aiohttp transport #7749\n\n### Breaking changes\n\n- Tracing: AbstractSpan.set_current_span is longer supported. Use change_context instead. 
#7773\n- azure.core.pipeline.policies.ContentDecodePolicy.deserialize_from_text changed\n\n## 1.0.0b4 (2019-10-07)\n\n### Features\n\n- Tracing: network span context is available with the TRACING_CONTEXT in pipeline response #7252\n- Tracing: Span contract now has `kind`, `traceparent` and is a context manager #7252\n- SansIOHTTPPolicy methods can now be coroutines #7497\n- Add multipart/mixed support #7083:\n\n - HttpRequest now has a \"set_multipart_mixed\" method to set the parts of this request\n - HttpRequest now has a \"prepare_multipart_body\" method to build final body.\n - HttpResponse now has a \"parts\" method to return an iterator of parts\n - AsyncHttpResponse now has a \"parts\" methods to return an async iterator of parts\n - Note that multipart/mixed is a Python 3.x only feature\n\n### Bug fixes\n\n- Tracing: policy cannot fail the pipeline, even in the worst condition #7252\n- Tracing: policy pass correctly status message if exception #7252\n- Tracing: incorrect span if exception raised from decorated function #7133\n- Fixed urllib3 ConnectTimeoutError being raised by Requests during a socket timeout. Now this exception is caught and wrapped as a `ServiceRequestError` #7542\n\n### Breaking changes\n\n- Tracing: `azure.core.tracing.context` removed\n- Tracing: `azure.core.tracing.context.tracing_context.with_current_context` renamed to `azure.core.tracing.common.with_current_context` #7252\n- Tracing: `link` renamed `link_from_headers` and `link` takes now a string\n- Tracing: opencensus implementation has been moved to the package `azure-core-tracing-opencensus`\n- Some modules and classes that were importables from several different places have been removed:\n\n - `azure.core.HttpResponseError` is now only `azure.core.exceptions.HttpResponseError`\n - `azure.core.Configuration` is now only `azure.core.configuration.Configuration`\n - `azure.core.HttpRequest` is now only `azure.core.pipeline.transport.HttpRequest`\n - `azure.core.version` module has been removed. Use `azure.core.__version__` to get version number.\n - `azure.core.pipeline_client` has been removed. Import from `azure.core` instead.\n - `azure.core.pipeline_client_async` has been removed. Import from `azure.core` instead.\n - `azure.core.pipeline.base` has been removed. Import from `azure.core.pipeline` instead.\n - `azure.core.pipeline.base_async` has been removed. Import from `azure.core.pipeline` instead.\n - `azure.core.pipeline.policies.base` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.base_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.authentication` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.authentication_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.custom_hook` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.redirect` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.redirect_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.retry` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.retry_async` has been removed. 
Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.distributed_tracing` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.universal` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.tracing.abstract_span` has been removed. Import from `azure.core.tracing` instead.\n - `azure.core.pipeline.transport.base` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.base_async` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_basic` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_asyncio` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_trio` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.aiohttp` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.polling.poller` has been removed. Import from `azure.core.polling` instead.\n - `azure.core.polling.async_poller` has been removed. Import from `azure.core.polling` instead.\n\n## 1.0.0b3 (2019-09-09)\n\n### Bug fixes\n\n- Fix aiohttp auto-headers #6992\n- Add tracing to policies module init #6951\n\n## 1.0.0b2 (2019-08-05)\n\n### Breaking changes\n\n- Transport classes don't take `config` parameter anymore (use kwargs instead) #6372\n- `azure.core.paging` has been completely refactored #6420\n- HttpResponse.content_type attribute is now a string (was a list) #6490\n- For `StreamDownloadGenerator` subclasses, `response` is now an `HttpResponse`, and not a transport response like `aiohttp.ClientResponse` or `requests.Response`. 
The transport response is available in `internal_response` attribute #6490\n\n### Bug fixes\n\n- aiohttp is not required to import async pipelines classes #6496\n- `AsyncioRequestsTransport.sleep` is now a coroutine as expected #6490\n- `RequestsTransport` is not tight to `ProxyPolicy` implementation details anymore #6372\n- `AiohttpTransport` does not raise on unexpected kwargs #6355\n\n### Features\n\n- New paging base classes that support `continuation_token` and `by_page()` #6420\n- Proxy support for `AiohttpTransport` #6372\n\n## 1.0.0b1 (2019-06-26)\n\n- Preview 1 release", - "release_date": "2025-07-03T00:55:25", + "description": "Microsoft Azure Core Library for Python\n# Azure Core shared client library for Python\n\nAzure core provides shared exceptions and modules for Python SDK client libraries.\nThese libraries follow the [Azure SDK Design Guidelines for Python](https://azure.github.io/azure-sdk/python/guidelines/index.html) .\n\nIf you are a client library developer, please reference [client library developer reference](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md) for more information.\n\n[Source code](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/)\n| [Package (Pypi)][package]\n| [Package (Conda)](https://anaconda.org/microsoft/azure-core/)\n| [API reference documentation](https://learn.microsoft.com/python/api/overview/azure/core-readme)\n\n## Getting started\n\nTypically, you will not need to install azure core;\nit will be installed when you install one of the client libraries using it.\nIn case you want to install it explicitly (to implement your own client library, for example),\nyou can find it [here](https://pypi.org/project/azure-core/).\n\n## Key concepts\n\n### Azure Core Library Exceptions\n\n#### AzureError\n\nAzureError is the base exception for all errors.\n\n```python\nclass AzureError(Exception):\n def __init__(self, message, *args, **kwargs):\n self.inner_exception = kwargs.get(\"error\")\n self.exc_type, self.exc_value, self.exc_traceback = sys.exc_info()\n self.exc_type = self.exc_type.__name__ if self.exc_type else type(self.inner_exception)\n self.exc_msg = \"{}, {}: {}\".format(message, self.exc_type, self.exc_value) # type: ignore\n self.message = str(message)\n self.continuation_token = kwargs.get(\"continuation_token\")\n super(AzureError, self).__init__(self.message, *args)\n```\n\n*message* is any message (str) to be associated with the exception.\n\n*args* are any additional args to be included with exception.\n\n*kwargs* are keyword arguments to include with the exception. Use the keyword *error* to pass in an internal exception and *continuation_token* for a token reference to continue an incomplete operation.\n\n**The following exceptions inherit from AzureError:**\n\n#### ServiceRequestError\n\nAn error occurred while attempt to make a request to the service. No request was sent.\n\n#### ServiceResponseError\n\nThe request was sent, but the client failed to understand the response.\nThe connection may have timed out. 
These errors can be retried for idempotent or safe operations.\n\n#### HttpResponseError\n\nA request was made, and a non-success status code was received from the service.\n\n```python\nclass HttpResponseError(AzureError):\n def __init__(self, message=None, response=None, **kwargs):\n self.reason = None\n self.response = response\n if response:\n self.reason = response.reason\n self.status_code = response.status_code\n self.error = self._parse_odata_body(ODataV4Format, response) # type: Optional[ODataV4Format]\n if self.error:\n message = str(self.error)\n else:\n message = message or \"Operation returned an invalid status '{}'\".format(\n self.reason\n )\n\n super(HttpResponseError, self).__init__(message=message, **kwargs)\n```\n\n*message* is the HTTP response error message (optional)\n\n*response* is the HTTP response (optional).\n\n*kwargs* are keyword arguments to include with the exception.\n\n**The following exceptions inherit from HttpResponseError:**\n\n#### DecodeError\n\nAn error raised during response de-serialization.\n\n#### IncompleteReadError\n\nAn error raised if peer closes the connection before we have received the complete message body.\n\n#### ResourceExistsError\n\nAn error response with status code 4xx. This will not be raised directly by the Azure core pipeline.\n\n#### ResourceNotFoundError\n\nAn error response, typically triggered by a 412 response (for update) or 404 (for get/post).\n\n#### ResourceModifiedError\n\nAn error response with status code 4xx, typically 412 Conflict. This will not be raised directly by the Azure core pipeline.\n\n#### ResourceNotModifiedError\n\nAn error response with status code 304. This will not be raised directly by the Azure core pipeline.\n\n#### ClientAuthenticationError\n\nAn error response with status code 4xx. This will not be raised directly by the Azure core pipeline.\n\n#### TooManyRedirectsError\n\nAn error raised when the maximum number of redirect attempts is reached. The maximum amount of redirects can be configured in the RedirectPolicy.\n\n```python\nclass TooManyRedirectsError(HttpResponseError):\n def __init__(self, history, *args, **kwargs):\n self.history = history\n message = \"Reached maximum redirect attempts.\"\n super(TooManyRedirectsError, self).__init__(message, *args, **kwargs)\n```\n\n*history* is used to document the requests/responses that resulted in redirected requests.\n\n*args* are any additional args to be included with exception.\n\n*kwargs* are keyword arguments to include with the exception.\n\n#### StreamConsumedError\n\nAn error thrown if you try to access the stream of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once\nthe response stream has been consumed.\n\n#### StreamClosedError\n\nAn error thrown if you try to access the stream of the `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once\nthe response stream has been closed.\n\n#### ResponseNotReadError\n\nAn error thrown if you try to access the `content` of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` before\nreading in the response's bytes first.\n\n### Configurations\n\nWhen calling the methods, some properties can be configured by passing in as kwargs arguments.\n\n| Parameters | Description |\n| --- | --- |\n| headers | The HTTP Request headers. |\n| request_id | The request id to be added into header. |\n| user_agent | If specified, this will be added in front of the user agent string. |\n| logging_enable| Use to enable per operation. Defaults to `False`. 
|\n| logger | If specified, it will be used to log information. |\n| response_encoding | The encoding to use if known for this service (will disable auto-detection). |\n| raw_request_hook | Callback function. Will be invoked on request. |\n| raw_response_hook | Callback function. Will be invoked on response. |\n| network_span_namer | A callable to customize the span name. |\n| tracing_attributes | Attributes to set on all created spans. |\n| permit_redirects | Whether the client allows redirects. Defaults to `True`. |\n| redirect_max | The maximum allowed redirects. Defaults to `30`. |\n| retry_total | Total number of retries to allow. Takes precedence over other counts. Default value is `10`. |\n| retry_connect | How many connection-related errors to retry on. These are errors raised before the request is sent to the remote server, which we assume has not triggered the server to process the request. Default value is `3`. |\n| retry_read | How many times to retry on read errors. These errors are raised after the request was sent to the server, so the request may have side-effects. Default value is `3`. |\n| retry_status | How many times to retry on bad status codes. Default value is `3`. |\n| retry_backoff_factor | A backoff factor to apply between attempts after the second try (most errors are resolved immediately by a second try without a delay). Retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))` seconds. If the backoff_factor is 0.1, then the retry will sleep for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is `0.8`. |\n| retry_backoff_max | The maximum back off time. Default value is `120` seconds (2 minutes). |\n| retry_mode | Fixed or exponential delay between attempts, default is `Exponential`. |\n| timeout | Timeout setting for the operation in seconds, default is `604800`s (7 days). |\n| connection_timeout | A single float in seconds for the connection timeout. Defaults to `300` seconds. |\n| read_timeout | A single float in seconds for the read timeout. Defaults to `300` seconds. |\n| connection_verify | SSL certificate verification. Enabled by default. Set to False to disable, alternatively can be set to the path to a CA_BUNDLE file or directory with certificates of trusted CAs. |\n| connection_cert | Client-side certificates. You can specify a local cert to use as client side certificate, as a single file (containing the private key and the certificate) or as a tuple of both files' paths. |\n| proxies | Dictionary mapping protocol or protocol and hostname to the URL of the proxy. |\n| cookies | Dict or CookieJar object to send with the `Request`. |\n| connection_data_block_size | The block size of data sent over the connection. Defaults to `4096` bytes. |\n\n### Async transport\n\nThe async transport is designed to be opt-in. [AioHttp](https://pypi.org/project/aiohttp/) is one of the supported implementations of async transport. It is not installed by default. You need to install it separately.\n\n### Shared modules\n\n#### MatchConditions\n\nMatchConditions is an enum to describe match conditions.\n\n```python\nclass MatchConditions(Enum):\n Unconditionally = 1 # Matches any condition\n IfNotModified = 2 # If the target object is not modified. Usually it maps to etag=\n IfModified = 3 # Only if the target object is modified. Usually it maps to etag!=\n IfPresent = 4 # If the target object exists. Usually it maps to etag='*'\n IfMissing = 5 # If the target object does not exist. 
Usually it maps to etag!='*'\n```\n\n#### CaseInsensitiveEnumMeta\n\nA metaclass to support case-insensitive enums.\n\n```python\nfrom enum import Enum\n\nfrom azure.core import CaseInsensitiveEnumMeta\n\nclass MyCustomEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):\n FOO = 'foo'\n BAR = 'bar'\n```\n\n#### Null Sentinel Value\n\nA falsy sentinel object which is supposed to be used to specify attributes\nwith no data. This gets serialized to `null` on the wire.\n\n```python\nfrom azure.core.serialization import NULL\n\nassert bool(NULL) is False\n\nfoo = Foo(\n attr=NULL\n)\n```\n\n## Logging\n\nAzure libraries follow the guidance of Python's standard [logging](https://docs.python.org/3/library/logging.html) module. By following the Python documentation on logging, you should be able to configure logging for Azure libraries effectively.\n\nAzure library loggers use a dot-based separated syntax, where the first section is always `azure`, followed by the package name. For example, the Azure Core library uses logger names that start with `azure.core`.\n\nHere's an example of how to configure logging for Azure libraries:\n\n```python\nimport logging\nimport sys\n\n# Enable detailed console logs across Azure libraries\nazure_logger = logging.getLogger(\"azure\")\nazure_logger.setLevel(logging.DEBUG)\nazure_logger.addHandler(logging.StreamHandler(stream=sys.stdout))\n\n# Exclude detailed logs for network calls associated with getting Entra ID token.\nidentity_logger = logging.getLogger(\"azure.identity\")\nidentity_logger.setLevel(logging.ERROR)\n\n# Make sure regular (redacted) detailed azure.core logs are not shown, as we are about to\n# turn on non-redacted logs by passing 'logging_enable=True' to the client constructor \nlogger = logging.getLogger(\"azure.core.pipeline.policies.http_logging_policy\")\nlogger.setLevel(logging.ERROR)\n```\n\n## Contributing\n\nThis project welcomes contributions and suggestions. Most contributions require\nyou to agree to a Contributor License Agreement (CLA) declaring that you have\nthe right to, and actually do, grant us the rights to use your contribution.\nFor details, visit [https://cla.microsoft.com](https://cla.microsoft.com).\n\nWhen you submit a pull request, a CLA-bot will automatically determine whether\nyou need to provide a CLA and decorate the PR appropriately (e.g., label,\ncomment). Simply follow the instructions provided by the bot. You will only\nneed to do this once across all repos using our CLA.\n\nThis project has adopted the\n[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).\nFor more information, see the\n[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)\nor contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any\nadditional questions or comments.\n\n\n[package]: https://pypi.org/project/azure-core/\n\n\n# Release History\n\n## 1.35.1 (2025-09-11)\n\n### Bugs Fixed\n\n- Fixed an issue where the `retry_backoff_max` parameter in `RetryPolicy` and `AsyncRetryPolicy` constructors was being ignored, causing retry operations to use default maximum backoff values instead of the user-specified limits. #42444\n\n### Other Changes\n\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` will now properly surface credential exceptions when handling claims challenges. 
Previously, exceptions from credential token requests were suppressed; now they are raised and chained with the original 401 `HttpResponseError` response for better debugging visibility. #42536\n\n## 1.35.0 (2025-07-02)\n\n### Features Added\n\n- Added a `start_time` keyword argument to the `start_span` and `start_as_current_span` methods in the `OpenTelemetryTracer` class. This allows users to specify a custom start time for created spans. #41106\n- Added a `context` keyword argument to the `start_span` and `start_as_current_span` methods in the `OpenTelemetryTracer` class. This allows users to specify a custom parent context for created spans. #41511\n- Added method `as_attribute_dict` to `azure.core.serialization` for backcompat migration purposes. Will return a generated model as a dictionary where the keys are in attribute syntax.\n- Added `is_generated_model` method to `azure.core.serialization`. Returns whether a given input is a model from one of our generated sdks. #41445\n- Added `attribute_list` method to `azure.core.serialization`. Returns all of the attributes of a given model from one of our generated sdks. #41571\n\n### Other Changes\n\n- A timeout error when using the `aiohttp` transport (the default for async SDKs) will now be raised as a `azure.core.exceptions.ServiceResponseTimeoutError`, a subtype of the previously raised `ServiceResponseError`.\n- When using with `aiohttp` 3.10 or later, a connection timeout error will now be raised as a `azure.core.exceptions.ServiceRequestTimeoutError`, which can be retried.\n- The default implementation of `on_challenge` in `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` will now cache the retrieved token. #41857\n\n## 1.34.0 (2025-05-01)\n\n### Features Added\n\n- Added a `set_span_error_status` method to the `OpenTelemetryTracer` class. This method allows users to set the status of a span to `ERROR` after it has been created. #40703\n\n### Other Changes\n\n- Python 3.8 is no longer supported. Please use Python version 3.9 or later.\n\n## 1.33.0 (2025-04-03)\n\n### Features Added\n\n- Added native OpenTelemetry tracing to Azure Core which enables users to use OpenTelemetry to trace Azure SDK operations without needing to install a plugin. #39563\n - To enable native OpenTelemetry tracing, users need to:\n 1. Have `opentelemetry-api` installed.\n 2. Ensure that `settings.tracing_implementation` is not set.\n 3. Ensure that `settings.tracing_enabled` is set to `True`.\n - If `setting.tracing_implementation` is set, the tracing plugin will be used instead of the native tracing.\n - If `settings.tracing_enabled` is set to `False`, tracing will be disabled.\n - The `OpenTelemetryTracer` class was added to the `azure.core.tracing.opentelemetry` module. This is a wrapper around the OpenTelemetry tracer that is used to create spans for Azure SDK operations.\n - Added a `get_tracer` method to the new `azure.core.instrumentation` module. This method returns an instance of the `OpenTelemetryTracer` class if OpenTelemetry is available.\n - A `TracingOptions` TypedDict class was added to define the options that SDK users can use to configure tracing per-operation. 
These options include the ability to enable or disable tracing and set additional attributes on spans.\n - Example usage: `client.method(tracing_options={\"enabled\": True, \"attributes\": {\"foo\": \"bar\"}})`\n - The `DistributedTracingPolicy` and `distributed_trace`/`distributed_trace_async` decorators now uses the OpenTelemetry tracer if it is available and native tracing is enabled.\n - SDK clients can define an `_instrumentation_config` class variable to configure the OpenTelemetry tracer used in method span creation. Possible configuration options are `library_name`, `library_version`, `schema_url`, and `attributes`.\n - `DistributedTracingPolicy` now accepts a `instrumentation_config` keyword argument to configure the OpenTelemetry tracer used in HTTP span creation.\n\n### Breaking Changes\n\n- Removed automatic tracing enablement for the OpenTelemetry plugin if `opentelemetry` was imported. To enable tracing with the plugin, please import `azure.core.settings.settings` and set `settings.tracing_implementation` to `\"opentelemetry\"`. #39563\n- In `DistributedTracingPolicy`, the default span name is now just the HTTP method (e.g., \"GET\", \"POST\") and no longer includes the URL path. This change was made to converge with the OpenTelemetry HTTP semantic conventions. The full URL is still included in the span attributes.\n- Renamed span attributes in `DistributedTracingPolicy`:\n - \"x-ms-client-request-id\" is now \"az.client_request_id\"\n - \"x-ms-request-id\" is now \"az.service_request_id\"\n\n### Bugs Fixed\n\n- Fixed an issue where the `traceparent` header was not being set correctly in the `DistributedTracingPolicy`. The `traceparent` header will now set based on the context of the HTTP client span. #40074\n\n### Other Changes\n\n- Added `opentelemetry-api` as an optional dependency for tracing. This can be installed with `pip install azure-core[tracing]`. #39563\n\n## 1.32.0 (2024-10-31)\n\n### Features Added\n\n- Added a default implementation to handle token challenges in `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy`.\n\n### Bugs Fixed\n\n- Fixed an issue where the `tracing_attributes` keyword argument wasn't being handled at the request/method level. #38164\n\n### Other Changes\n\n- Log \"x-vss-e2eid\" and \"x-msedge-ref\" headers in `HttpLoggingPolicy`.\n\n## 1.31.0 (2024-09-12)\n\n### Features Added\n\n- Added azure.core.AzureClouds enum to represent the different Azure clouds.\n- Added two new credential protocol classes, `SupportsTokenInfo` and `AsyncSupportsTokenInfo`, to offer more extensibility in supporting various token acquisition scenarios. #36565\n - Each new protocol class defines a `get_token_info` method that returns an `AccessTokenInfo` object.\n- Added a new `TokenRequestOptions` class, which is a `TypedDict` with optional parameters, that can be used to define options for token requests through the `get_token_info` method. #36565\n- Added a new `AccessTokenInfo` class, which is returned by `get_token_info` implementations. This class contains the token, its expiration time, and optional additional information like when a token should be refreshed. #36565\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` now first check if a credential has the `get_token_info` method defined. If so, the `get_token_info` method is used to acquire a token. Otherwise, the `get_token` method is used. 
#36565\n - These policies now also check the `refresh_on` attribute when determining if a new token request should be made.\n\n### Other Changes\n\n- The Azure Core OpenTelemetry tracing plugin will now be the preferred tracing plugin over the OpenCensus plugin. If both plugins are installed and `opentelemetry` is imported, then OpenTelemetry will be used to trace Azure SDK operations. #35050\n\n## 1.30.2 (2024-06-06)\n\n### Features Added\n\n- Tracing: `DistributedTracingPolicy` will now set an attribute, `http.request.resend_count`, on HTTP spans for resent requests to indicate the resend attempt number. #35069\n\n### Bugs Fixed\n\n- Raise correct exception if transport is used while already closed #35559\n\n### Other Changes\n\n- HTTP tracing spans will now include an `error.type` attribute if an error status code is returned. #34619\n- Minimum required Python version is now 3.8\n\n## 1.30.1 (2024-02-29)\n\n### Other Changes\n\n- Accept float for `retry_after` header. #34203\n\n## 1.30.0 (2024-02-01)\n\n### Features Added\n\n- Support tuple input for file values to `azure.core.rest.HttpRequest` #33948\n- Support tuple input to `files` with duplicate field names `azure.core.rest.HttpRequest` #34021\n\n## 1.29.7 (2024-01-18)\n\n### Other Changes\n\n- Removed dependency on `anyio`. #33282\n\n## 1.29.6 (2023-12-14)\n\n### Bugs Fixed\n\n- Adjusted `AsyncBearerTokenCredentialPolicy` to work properly with `trio` concurrency mechanisms. ([#33307](https://github.com/Azure/azure-sdk-for-python/pull/33307))\n\n### Other Changes\n\n- Added dependency on `anyio` >=3.0,<5.0\n- Bumped minimum dependency on `requests` to 2.21.0.\n\n## 1.29.5 (2023-10-19)\n\n### Bugs Fixed\n\n- Fixed an issue with `multipart/form-data` in the async transport where `data` was not getting encoded into the request body. #32473\n\n### Other Changes\n\n- Use ssl context from aiohttp by default.\n\n## 1.29.4 (2023-09-07)\n\n### Bugs Fixed\n\n- Fixed the issue that some urls trigger an infinite loop. #31346\n- Fixed issue where IndexError was raised if multipart responses did not match the number of requests. #31471\n- Fixed issue unbound variable exception if dict is invalid in CloudEvent.from_dict. #31835\n- Fixed issue asyncBearerTokenCredentialPolicy is not backward compatible with SansIOHTTPPolicy. #31836\n- Fixed issue mypy complains with new version of azure-core. #31564\n\n## 1.29.3 (2023-08-22)\n\n### Bugs Fixed\n\n- Typing fix: `message` cannot be `None` in `AzureError`. #31564\n\n## 1.29.2 (2023-08-14)\n\n### Bugs Fixed\n\n- Added a default implementation for `AsyncTokenCredential.__aexit__()` #31573\n\n### Other Changes\n\n- Bumped `typing-extensions` version to 4.6.0.\n\n## 1.29.1 (2023-08-09)\n\n### Bugs Fixed\n\n- Not pass `enabled_cae` unless it is explicitly enabled.\n\n## 1.29.0 (2023-08-03)\n\n### Features Added\n\n- A keyword argument `enable_cae` was added to the `get_token` method of the `TokenCredential` protocol. #31012\n- `BearerTokenCredentialPolicy` and `AsyncBearerTokenCredentialPolicy` now accept `enable_cae` keyword arguments in their constructors. This is used in determining if [Continuous Access Evaluation (CAE)](https://learn.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation) should be enabled for each `get_token` request. #31012\n\n## 1.28.0 (2023-07-06)\n\n### Features Added\n\n- Added header name parameter to `RequestIdPolicy`. 
#30772\n- Added `SensitiveHeaderCleanupPolicy` that cleans up sensitive headers if a redirect happens and the new destination is in another domain. #28349\n\n### Other Changes\n\n- Catch aiohttp errors and translate them into azure-core errors.\n\n## 1.27.1 (2023-06-13)\n\n### Bugs Fixed\n\n- Fix url building for some complex query parameters scenarios #30707\n\n## 1.27.0 (2023-06-01)\n\n### Features Added\n\n- Added support to use sync credentials in `AsyncBearerTokenCredentialPolicy`. #30381\n- Added \"prefix\" parameter to AzureKeyCredentialPolicy #29901\n\n### Bugs Fixed\n\n- Improve error message when providing the wrong credential type for AzureKeyCredential #30380\n\n## 1.26.4 (2023-04-06)\n\n### Features Added\n\n- Updated settings to include OpenTelemetry as a tracer provider. #29095\n\n### Other Changes\n\n- Improved typing\n\n## 1.26.3 (2023-02-02)\n\n### Bugs Fixed\n\n- Fixed deflate decompression for aiohttp #28483\n\n## 1.26.2 (2023-01-05)\n\n### Bugs Fixed\n\n- Fix 'ClientSession' object has no attribute 'auto_decompress' (thanks to @mghextreme for the contribution)\n\n### Other Changes\n\n- Add \"x-ms-error-code\" as secure header to log\n- Rename \"DEFAULT_HEADERS_WHITELIST\" to \"DEFAULT_HEADERS_ALLOWLIST\". Added a backward compatible alias.\n\n## 1.26.1 (2022-11-03)\n\n### Other Changes\n\n- Added example of RequestsTransport with custom session. (thanks to @inirudebwoy for the contribution) #26768\n- Added Python 3.11 support.\n\n## 1.26.0 (2022-10-06)\n\n### Other Changes\n\n- LRO polling will not wait anymore before doing the first status check #26376\n- Added extra dependency for [aio]. pip install azure-core[aio] installs aiohttp too.\n\n## 1.25.1 (2022-09-01)\n\n### Bugs Fixed\n\n- Added @runtime_checkable to `TokenCredential` protocol definitions #25187\n\n## 1.25.0 (2022-08-04)\n\nAzure-core is supported on Python 3.7 or later. For more details, please read our page on [Azure SDK for Python version support policy](https://github.com/Azure/azure-sdk-for-python/wiki/Azure-SDKs-Python-version-support-policy).\n\n### Features Added\n\n- Added `CaseInsensitiveDict` implementation in `azure.core.utils` removing dependency on `requests` and `aiohttp`\n\n## 1.24.2 (2022-06-30)\n\n### Bugs Fixed\n\n- Fixed the bug that azure-core could not be imported under Python 3.11.0b3 #24928\n- `ContentDecodePolicy` can now correctly deserialize more JSON bodies with different mime types #22410\n\n## 1.24.1 (2022-06-01)\n\n### Bugs Fixed\n\n- Declare method level span as INTERNAL by default #24492\n- Fixed type hints for `azure.core.paging.ItemPaged` #24548\n\n## 1.24.0 (2022-05-06)\n\n### Features Added\n\n- Add `SerializationError` and `DeserializationError` in `azure.core.exceptions` for errors raised during serialization / deserialization #24312\n\n## 1.23.1 (2022-03-31)\n\n### Bugs Fixed\n\n- Allow stream inputs to the `content` kwarg of `azure.core.rest.HttpRequest` from objects with a `read` method #23578\n\n## 1.23.0 (2022-03-03)\n\n### Features Added\n\n- Improve intellisense type hinting for service client methods. #22891\n\n- Add a case insensitive dict `case_insensitive_dict` in `azure.core.utils`. #23206\n\n### Bugs Fixed\n\n- Use \"\\n\" rather than \"/n\" for new line in log. 
#23261\n\n### Other Changes\n\n- Log \"WWW-Authenticate\" header in `HttpLoggingPolicy` #22990\n- Added dependency on `typing-extensions` >= 4.0.1\n\n## 1.22.1 (2022-02-09)\n\n### Bugs Fixed\n\n- Limiting `final-state-via` scope to POST until consuming SDKs has been fixed to use this option properly on PUT. #22989\n\n## 1.22.0 (2022-02-03)\n_[**This version is deprecated.**]_\n\n### Features Added\n\n- Add support for `final-state-via` LRO option in core. #22713\n\n### Bugs Fixed\n\n- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #22302\n- Raise `AttributeError` when calling azure.core.pipeline.transport.\\_\\_bases__ #22469\n\n### Other Changes\n\n- Python 2.7 is no longer supported. Please use Python version 3.6 or later.\n\n## 1.21.1 (2021-12-06)\n\n### Other Changes\n\n- Revert change in str method #22023\n\n## 1.21.0 (2021-12-02)\n\n### Breaking Changes\n\n- Sync stream downloading now raises `azure.core.exceptions.DecodeError` rather than `requests.exceptions.ContentDecodingError`\n\n### Bugs Fixed\n\n- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #21800\n\n## 1.20.1 (2021-11-08)\n\n### Bugs Fixed\n\n- Correctly set response's content to decompressed body when users are using aiohttp transport with decompression headers #21620\n\n## 1.20.0 (2021-11-04)\n\n### Features Added\n\n- GA `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in\nrequests and sends them through our pipelines.\n- GA `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.\n- GA errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. 
These errors\nare thrown if you mishandle streamed responses from the `azure.core.rest` module\n- add kwargs to the methods for `iter_raw` and `iter_bytes` #21529\n- no longer raise JSON errors if users pass in file descriptors of JSON to the `json` kwarg in `HttpRequest` #21504\n- Added new error type `IncompleteReadError` which is raised if peer closes the connection before we have received the complete message body.\n\n### Breaking Changes\n\n- SansIOHTTPPolicy.on_exception returns None instead of bool.\n\n### Bugs Fixed\n\n- The `Content-Length` header in a http response is strictly checked against the actual number of bytes in the body,\n rather than silently truncating data in case the underlying tcp connection is closed prematurely.\n (thanks to @jochen-ott-by for the contribution) #20412\n- UnboundLocalError when SansIOHTTPPolicy handles an exception #15222\n- Add default content type header of `text/plain` and content length header for users who pass unicode strings to the `content` kwarg of `HttpRequest` in 2.7 #21550\n\n## 1.19.1 (2021-11-01)\n\n### Bugs Fixed\n\n- respect text encoding specified in argument (thanks to @ryohji for the contribution) #20796\n- Fix \"coroutine x.read() was never awaited\" warning from `ContentDecodePolicy` #21318\n- fix type check for `data` input to `azure.core.rest` for python 2.7 users #21341\n- use `charset_normalizer` if `chardet` is not installed to migrate aiohttp 3.8.0 changes.\n\n### Other Changes\n\n- Refactor AzureJSONEncoder (thanks to @Codejune for the contribution) #21028\n\n## 1.19.0 (2021-09-30)\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` are now abstract base classes. They should not be initialized directly, instead\nyour transport responses should inherit from them and implement them.\n- The properties of the `azure.core.rest` responses are now all read-only\n\n- HttpLoggingPolicy integrates logs into one record #19925\n\n## 1.18.0 (2021-09-02)\n\n### Features Added\n\n- `azure.core.serialization.AzureJSONEncoder` (introduced in 1.17.0) serializes `datetime.datetime` objects in ISO 8601 format, conforming to RFC 3339's specification. #20190\n- We now use `azure.core.serialization.AzureJSONEncoder` to serialize `json` input to `azure.core.rest.HttpRequest`.\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- The `text` property on `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` has changed to a method, which also takes\nan `encoding` parameter.\n- Removed `iter_text` and `iter_lines` from `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse`\n\n### Bugs Fixed\n\n- The behaviour of the headers returned in `azure.core.rest` responses now aligns across sync and async. Items can now be checked case-insensitively and without raising an error for format.\n\n## 1.17.0 (2021-08-05)\n\n### Features Added\n\n- Cut hard dependency on requests library\n- Added a `from_json` method which now accepts storage QueueMessage, eventhub's EventData or ServiceBusMessage or simply json bytes to return a `CloudEvent`\n\n### Fixed\n\n- Not override \"x-ms-client-request-id\" if it already exists in the header. #17757\n\n### Breaking Changes in the Provisional `azure.core.rest` package\n\n- `azure.core.rest` will not try to guess the `charset` anymore if it was impossible to extract it from `HttpResponse` analysis. 
This removes our dependency on `charset`.\n\n## 1.16.0 (2021-07-01)\n\n### Features Added\n\n- Add new ***provisional*** methods `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in\nrequests and sends them through our pipelines.\n- Add new ***provisional*** module `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.\n- Add new ***provisional*** errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. These errors\nare thrown if you mishandle streamed responses from the provisional `azure.core.rest` module\n\n### Fixed\n\n- Improved error message in the `from_dict` method of `CloudEvent` when a wrong schema is sent.\n\n## 1.15.0 (2021-06-04)\n\n### New Features\n\n- Added `BearerTokenCredentialPolicy.on_challenge` and `.authorize_request` to allow subclasses to optionally handle authentication challenges\n\n### Bug Fixes\n\n- Retry policies don't sleep after operations time out\n- The `from_dict` methhod in the `CloudEvent` can now convert a datetime string to datetime object when microsecond exceeds the python limitation\n\n## 1.14.0 (2021-05-13)\n\n### New Features\n\n- Added `azure.core.credentials.AzureNamedKeyCredential` credential #17548.\n- Added `decompress` parameter for `stream_download` method. If it is set to `False`, will not do decompression upon the stream. #17920\n\n## 1.13.0 (2021-04-02)\n\nAzure core requires Python 2.7 or Python 3.6+ since this release.\n\n### New Features\n\n- Added `azure.core.utils.parse_connection_string` function to parse connection strings across SDKs, with common validation and support for case insensitive keys.\n- Supported adding custom policies #16519\n- Added `~azure.core.tracing.Link` that should be used while passing `Links` to `AbstractSpan`.\n- `AbstractSpan` constructor can now take in additional keyword only args.\n\n### Bug fixes\n\n- Make NetworkTraceLoggingPolicy show the auth token in plain text. #14191\n- Fixed RetryPolicy overriding default connection timeout with an extreme value #17481\n\n## 1.12.0 (2021-03-08)\n\nThis version will be the last version to officially support Python 3.5, future versions will require Python 2.7 or Python 3.6+.\n\n### Features\n\n- Added `azure.core.messaging.CloudEvent` model that follows the cloud event spec.\n- Added `azure.core.serialization.NULL` sentinel value\n- Improve `repr`s for `HttpRequest` and `HttpResponse`s #16972\n\n### Bug Fixes\n\n- Disable retry in stream downloading. (thanks to @jochen-ott-by @hoffmann for the contribution) #16723\n\n## 1.11.0 (2021-02-08)\n\n### Features\n\n- Added `CaseInsensitiveEnumMeta` class for case-insensitive enums. #16316\n- Add `raise_for_status` method onto `HttpResponse`. Calling `response.raise_for_status()` on a response with an error code\nwill raise an `HttpResponseError`. Calling it on a good response will do nothing #16399\n\n### Bug Fixes\n\n- Update conn.conn_kw rather than overriding it when setting block size. (thanks for @jiasli for the contribution) #16587\n\n## 1.10.0 (2021-01-11)\n\n### Features\n\n- Added `AzureSasCredential` and its respective policy. 
#15946\n\n## 1.9.0 (2020-11-09)\n\n### Features\n\n- Add a `continuation_token` attribute to the base `AzureError` exception, and set this value for errors raised\n during paged or long-running operations.\n\n### Bug Fixes\n\n- Set retry_interval to 1 second instead of 1000 seconds (thanks **vbarbaresi** for contributing) #14357\n\n\n## 1.8.2 (2020-10-05)\n\n### Bug Fixes\n\n- Fixed bug to allow polling in the case of parameterized endpoints with relative polling urls #14097\n\n\n## 1.8.1 (2020-09-08)\n\n### Bug fixes\n\n- SAS credential replicated \"/\" fix #13159\n\n## 1.8.0 (2020-08-10)\n\n### Features\n\n- Support params as list for exploding parameters #12410\n\n\n## 1.7.0 (2020-07-06)\n\n### Bug fixes\n\n- `AzureKeyCredentialPolicy` will now accept (and ignore) passed in kwargs #11963\n- Better error messages if passed endpoint is incorrect #12106\n- Do not JSON encore a string if content type is \"text\" #12137\n\n### Features\n\n- Added `http_logging_policy` property on the `Configuration` object, allowing users to individually\nset the http logging policy of the config #12218\n\n## 1.6.0 (2020-06-03)\n\n### Bug fixes\n\n- Fixed deadlocks in AsyncBearerTokenCredentialPolicy #11543\n- Fix AttributeException in StreamDownloadGenerator #11462\n\n### Features\n\n- Added support for changesets as part of multipart message support #10485\n- Add AsyncLROPoller in azure.core.polling #10801\n- Add get_continuation_token/from_continuation_token/polling_method methods in pollers (sync and async) #10801\n- HttpResponse and PipelineContext objects are now pickable #10801\n\n## 1.5.0 (2020-05-04)\n\n### Features\n\n- Support \"x-ms-retry-after-ms\" in response header #10743\n- `link` and `link_from_headers` now accepts attributes #10765\n\n### Bug fixes\n\n- Not retry if the status code is less than 400 #10778\n- \"x-ms-request-id\" is not considered safe header for logging #10967\n\n## 1.4.0 (2020-04-06)\n\n### Features\n\n- Support a default error type in map_error #9773\n- Added `AzureKeyCredential` and its respective policy. 
#10509\n- Added `azure.core.polling.base_polling` module with a \"Microsoft One API\" polling implementation #10090\n Also contains the async version in `azure.core.polling.async_base_polling`\n- Support kwarg `enforce_https` to disable HTTPS check on authentication #9821\n- Support additional kwargs in `HttpRequest.set_multipart_mixed` that will be passed into pipeline context.\n\n## 1.3.0 (2020-03-09)\n\n### Bug fixes\n\n- Appended RequestIdPolicy to the default pipeline #9841\n- Rewind the body position in async_retry #10117\n\n### Features\n\n- Add raw_request_hook support in custom_hook_policy #9958\n- Add timeout support in retry_policy #10011\n- Add OdataV4 error format auto-parsing in all exceptions ('error' attribute) #9738\n\n## 1.2.2 (2020-02-10)\n\n### Bug fixes\n\n- Fixed a bug that sends None as request_id #9545\n- Enable mypy for customers #9572\n- Handle TypeError in deep copy #9620\n- Fix text/plain content-type in decoder #9589\n\n## 1.2.1 (2020-01-14)\n\n### Bug fixes\n\n- Fixed a regression in 1.2.0 that was incompatible with azure-keyvault-* 4.0.0\n[#9462](https://github.com/Azure/azure-sdk-for-python/issues/9462)\n\n\n## 1.2.0 (2020-01-14)\n\n### Features\n\n- Add user_agent & sdk_moniker kwargs in UserAgentPolicy init #9355\n- Support OPTIONS HTTP verb #9322\n- Add tracing_attributes to tracing decorator #9297\n- Support auto_request_id in RequestIdPolicy #9163\n- Support fixed retry #6419\n- Support \"retry-after-ms\" in response header #9240\n\n### Bug fixes\n\n- Removed `__enter__` and `__exit__` from async context managers #9313\n\n## 1.1.1 (2019-12-03)\n\n### Bug fixes\n\n- Bearer token authorization requires HTTPS\n- Rewind the body position in retry #8307\n\n## 1.1.0 (2019-11-25)\n\n### Features\n\n- New RequestIdPolicy #8437\n- Enable logging policy in default pipeline #8053\n- Normalize transport timeout. #8000\n Now we have:\n * 'connection_timeout' - a single float in seconds for the connection timeout. Default 5min\n * 'read_timeout' - a single float in seconds for the read timeout. Default 5min\n\n### Bug fixes\n\n- RequestHistory: deepcopy fails if request contains a stream #7732\n- Retry: retry raises error if response does not have http_response #8629\n- Client kwargs are now passed to DistributedTracingPolicy correctly #8051\n- NetworkLoggingPolicy now logs correctly all requests in case of retry #8262\n\n## 1.0.0 (2019-10-29)\n\n### Features\n\n- Tracing: DistributedTracingPolicy now accepts kwargs network_span_namer to change network span name #7773\n- Tracing: Implementation of AbstractSpan can now use the mixin HttpSpanMixin to get HTTP span update automatically #7773\n- Tracing: AbstractSpan contract \"change_context\" introduced #7773\n- Introduce new policy HttpLoggingPolicy #7988\n\n### Bug fixes\n\n- Fix AsyncioRequestsTransport if input stream is an async generator #7743\n- Fix form-data with aiohttp transport #7749\n\n### Breaking changes\n\n- Tracing: AbstractSpan.set_current_span is longer supported. Use change_context instead. 
#7773\n- azure.core.pipeline.policies.ContentDecodePolicy.deserialize_from_text changed\n\n## 1.0.0b4 (2019-10-07)\n\n### Features\n\n- Tracing: network span context is available with the TRACING_CONTEXT in pipeline response #7252\n- Tracing: Span contract now has `kind`, `traceparent` and is a context manager #7252\n- SansIOHTTPPolicy methods can now be coroutines #7497\n- Add multipart/mixed support #7083:\n\n - HttpRequest now has a \"set_multipart_mixed\" method to set the parts of this request\n - HttpRequest now has a \"prepare_multipart_body\" method to build final body.\n - HttpResponse now has a \"parts\" method to return an iterator of parts\n - AsyncHttpResponse now has a \"parts\" methods to return an async iterator of parts\n - Note that multipart/mixed is a Python 3.x only feature\n\n### Bug fixes\n\n- Tracing: policy cannot fail the pipeline, even in the worst condition #7252\n- Tracing: policy pass correctly status message if exception #7252\n- Tracing: incorrect span if exception raised from decorated function #7133\n- Fixed urllib3 ConnectTimeoutError being raised by Requests during a socket timeout. Now this exception is caught and wrapped as a `ServiceRequestError` #7542\n\n### Breaking changes\n\n- Tracing: `azure.core.tracing.context` removed\n- Tracing: `azure.core.tracing.context.tracing_context.with_current_context` renamed to `azure.core.tracing.common.with_current_context` #7252\n- Tracing: `link` renamed `link_from_headers` and `link` takes now a string\n- Tracing: opencensus implementation has been moved to the package `azure-core-tracing-opencensus`\n- Some modules and classes that were importables from several different places have been removed:\n\n - `azure.core.HttpResponseError` is now only `azure.core.exceptions.HttpResponseError`\n - `azure.core.Configuration` is now only `azure.core.configuration.Configuration`\n - `azure.core.HttpRequest` is now only `azure.core.pipeline.transport.HttpRequest`\n - `azure.core.version` module has been removed. Use `azure.core.__version__` to get version number.\n - `azure.core.pipeline_client` has been removed. Import from `azure.core` instead.\n - `azure.core.pipeline_client_async` has been removed. Import from `azure.core` instead.\n - `azure.core.pipeline.base` has been removed. Import from `azure.core.pipeline` instead.\n - `azure.core.pipeline.base_async` has been removed. Import from `azure.core.pipeline` instead.\n - `azure.core.pipeline.policies.base` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.base_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.authentication` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.authentication_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.custom_hook` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.redirect` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.redirect_async` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.retry` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.retry_async` has been removed. 
Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.distributed_tracing` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.pipeline.policies.universal` has been removed. Import from `azure.core.pipeline.policies` instead.\n - `azure.core.tracing.abstract_span` has been removed. Import from `azure.core.tracing` instead.\n - `azure.core.pipeline.transport.base` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.base_async` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_basic` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_asyncio` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.requests_trio` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.pipeline.transport.aiohttp` has been removed. Import from `azure.core.pipeline.transport` instead.\n - `azure.core.polling.poller` has been removed. Import from `azure.core.polling` instead.\n - `azure.core.polling.async_poller` has been removed. Import from `azure.core.polling` instead.\n\n## 1.0.0b3 (2019-09-09)\n\n### Bug fixes\n\n- Fix aiohttp auto-headers #6992\n- Add tracing to policies module init #6951\n\n## 1.0.0b2 (2019-08-05)\n\n### Breaking changes\n\n- Transport classes don't take `config` parameter anymore (use kwargs instead) #6372\n- `azure.core.paging` has been completely refactored #6420\n- HttpResponse.content_type attribute is now a string (was a list) #6490\n- For `StreamDownloadGenerator` subclasses, `response` is now an `HttpResponse`, and not a transport response like `aiohttp.ClientResponse` or `requests.Response`. 
The transport response is available in `internal_response` attribute #6490\n\n### Bug fixes\n\n- aiohttp is not required to import async pipelines classes #6496\n- `AsyncioRequestsTransport.sleep` is now a coroutine as expected #6490\n- `RequestsTransport` is not tight to `ProxyPolicy` implementation details anymore #6372\n- `AiohttpTransport` does not raise on unexpected kwargs #6355\n\n### Features\n\n- New paging base classes that support `continuation_token` and `by_page()` #6420\n- Proxy support for `AiohttpTransport` #6372\n\n## 1.0.0b1 (2019-06-26)\n\n- Preview 1 release", + "release_date": "2025-09-11T22:58:06", "parties": [ { "type": "person", @@ -155,11 +155,11 @@ "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/core/azure-core", - "download_url": "https://files.pythonhosted.org/packages/d4/78/bf94897361fdd650850f0f2e405b2293e2f12808239046232bdedf554301/azure_core-1.35.0-py3-none-any.whl", - "size": 210708, + "download_url": "https://files.pythonhosted.org/packages/27/52/805980aa1ba18282077c484dba634ef0ede1e84eec8be9c92b2e162d0ed6/azure_core-1.35.1-py3-none-any.whl", + "size": 211800, "sha1": null, - "md5": "a5bb28aab86f7accdd9c7c36533d6a2d", - "sha256": "8db78c72868a58f3de8991eb4d22c4d368fae226dac1002998d6c50437e7dad1", + "md5": "ece20e5c0b954f1f10defd30a0cc86a0", + "sha256": "12da0c9e08e48e198f9158b56ddbe33b421477e1dc98c2e1c8f9e254d92c468b", "sha512": null, "bug_tracking_url": null, "code_view_url": null, @@ -179,9 +179,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/azure-core/1.35.0/json", + "api_data_url": "https://pypi.org/pypi/azure-core/1.35.1/json", "datasource_id": null, - "purl": "pkg:pypi/azure-core@1.35.0" + "purl": "pkg:pypi/azure-core@1.35.1" }, { "type": "pypi", @@ -255,12 +255,12 @@ "type": "pypi", "namespace": null, "name": "azure-storage-blob", - "version": "12.25.1", + "version": "12.26.0", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "Microsoft Azure Blob Storage Client Library for Python\n# Azure Storage Blobs client library for Python\nAzure Blob storage is Microsoft's object storage solution for the cloud. Blob storage is optimized for storing massive amounts of unstructured data, such as text or binary data.\n\nBlob storage is ideal for:\n\n* Serving images or documents directly to a browser\n* Storing files for distributed access\n* Streaming video and audio\n* Storing data for backup and restore, disaster recovery, and archiving\n* Storing data for analysis by an on-premises or Azure-hosted service\n\n[Source code](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/azure/storage/blob)\n| [Package (PyPI)](https://pypi.org/project/azure-storage-blob/)\n| [Package (Conda)](https://anaconda.org/microsoft/azure-storage/)\n| [API reference documentation](https://aka.ms/azsdk-python-storage-blob-ref)\n| [Product documentation](https://learn.microsoft.com/azure/storage/)\n| [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples)\n\n\n## Getting started\n\n### Prerequisites\n* Python 3.8 or later is required to use this package. 
For more details, please read our page on [Azure SDK for Python version support policy](https://github.com/Azure/azure-sdk-for-python/wiki/Azure-SDKs-Python-version-support-policy).\n* You must have an [Azure subscription](https://azure.microsoft.com/free/) and an\n[Azure storage account](https://learn.microsoft.com/azure/storage/common/storage-account-overview) to use this package.\n\n### Install the package\nInstall the Azure Storage Blobs client library for Python with [pip](https://pypi.org/project/pip/):\n\n```bash\npip install azure-storage-blob\n```\n\n### Create a storage account\nIf you wish to create a new storage account, you can use the\n[Azure Portal](https://learn.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-portal),\n[Azure PowerShell](https://learn.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-powershell),\nor [Azure CLI](https://learn.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-cli):\n\n```bash\n# Create a new resource group to hold the storage account -\n# if using an existing resource group, skip this step\naz group create --name my-resource-group --location westus2\n\n# Create the storage account\naz storage account create -n my-storage-account-name -g my-resource-group\n```\n\n### Create the client\nThe Azure Storage Blobs client library for Python allows you to interact with three types of resources: the storage\naccount itself, blob storage containers, and blobs. Interaction with these resources starts with an instance of a\n[client](#clients). To create a client object, you will need the storage account's blob service account URL and a\ncredential that allows you to access the storage account:\n\n```python\nfrom azure.storage.blob import BlobServiceClient\n\nservice = BlobServiceClient(account_url=\"https://.blob.core.windows.net/\", credential=credential)\n```\n\n#### Looking up the account URL\nYou can find the storage account's blob service URL using the\n[Azure Portal](https://learn.microsoft.com/azure/storage/common/storage-account-overview#storage-account-endpoints),\n[Azure PowerShell](https://learn.microsoft.com/powershell/module/az.storage/get-azstorageaccount),\nor [Azure CLI](https://learn.microsoft.com/cli/azure/storage/account?view=azure-cli-latest#az-storage-account-show):\n\n```bash\n# Get the blob service account url for the storage account\naz storage account show -n my-storage-account-name -g my-resource-group --query \"primaryEndpoints.blob\"\n```\n\n#### Types of credentials\nThe `credential` parameter may be provided in a number of different forms, depending on the type of\n[authorization](https://learn.microsoft.com/azure/storage/common/storage-auth) you wish to use:\n1. 
To use an [Azure Active Directory (AAD) token credential](https://learn.microsoft.com/azure/storage/common/storage-auth-aad),\n provide an instance of the desired credential type obtained from the\n [azure-identity](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#credentials) library.\n For example, [DefaultAzureCredential](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#defaultazurecredential)\n can be used to authenticate the client.\n\n This requires some initial setup:\n * [Install azure-identity](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#install-the-package)\n * [Register a new AAD application](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) and give permissions to access Azure Storage\n * [Grant access](https://learn.microsoft.com/azure/storage/common/storage-auth-aad-rbac-portal) to Azure Blob data with RBAC in the Azure Portal\n * Set the values of the client ID, tenant ID, and client secret of the AAD application as environment variables:\n AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET\n\n Use the returned token credential to authenticate the client:\n ```python\n from azure.identity import DefaultAzureCredential\n from azure.storage.blob import BlobServiceClient\n token_credential = DefaultAzureCredential()\n\n blob_service_client = BlobServiceClient(\n account_url=\"https://.blob.core.windows.net\",\n credential=token_credential\n )\n ```\n\n2. To use a [shared access signature (SAS) token](https://learn.microsoft.com/azure/storage/common/storage-sas-overview),\n provide the token as a string. If your account URL includes the SAS token, omit the credential parameter.\n You can generate a SAS token from the Azure Portal under \"Shared access signature\" or use one of the `generate_sas()`\n functions to create a sas token for the storage account, container, or blob:\n\n ```python\n from datetime import datetime, timedelta\n from azure.storage.blob import BlobServiceClient, generate_account_sas, ResourceTypes, AccountSasPermissions\n\n sas_token = generate_account_sas(\n account_name=\"\",\n account_key=\"\",\n resource_types=ResourceTypes(service=True),\n permission=AccountSasPermissions(read=True),\n expiry=datetime.utcnow() + timedelta(hours=1)\n )\n\n blob_service_client = BlobServiceClient(account_url=\"https://.blob.core.windows.net\", credential=sas_token)\n ```\n\n3. To use a storage account [shared key](https://learn.microsoft.com/rest/api/storageservices/authenticate-with-shared-key/)\n (aka account key or access key), provide the key as a string. This can be found in the Azure Portal under the \"Access Keys\"\n section or by running the following Azure CLI command:\n\n ```az storage account keys list -g MyResourceGroup -n MyStorageAccount```\n\n Use the key as the credential parameter to authenticate the client:\n ```python\n from azure.storage.blob import BlobServiceClient\n service = BlobServiceClient(account_url=\"https://.blob.core.windows.net\", credential=\"\")\n ```\n \n If you are using **customized url** (which means the url is not in this format `.blob.core.windows.net`),\n please instantiate the client using the credential below:\n ```python\n from azure.storage.blob import BlobServiceClient\n service = BlobServiceClient(account_url=\"https://.blob.core.windows.net\", \n credential={\"account_name\": \"\", \"account_key\":\"\"})\n ```\n\n4. 
To use [anonymous public read access](https://learn.microsoft.com/azure/storage/blobs/storage-manage-access-to-resources),\n simply omit the credential parameter.\n\n#### Creating the client from a connection string\nDepending on your use case and authorization method, you may prefer to initialize a client instance with a storage\nconnection string instead of providing the account URL and credential separately. To do this, pass the storage\nconnection string to the client's `from_connection_string` class method:\n\n```python\nfrom azure.storage.blob import BlobServiceClient\n\nconnection_string = \"DefaultEndpointsProtocol=https;AccountName=xxxx;AccountKey=xxxx;EndpointSuffix=core.windows.net\"\nservice = BlobServiceClient.from_connection_string(conn_str=connection_string)\n```\n\nThe connection string to your storage account can be found in the Azure Portal under the \"Access Keys\" section or by running the following CLI command:\n\n```bash\naz storage account show-connection-string -g MyResourceGroup -n MyStorageAccount\n```\n\n## Key concepts\nThe following components make up the Azure Blob Service:\n* The storage account itself\n* A container within the storage account\n* A blob within a container\n\nThe Azure Storage Blobs client library for Python allows you to interact with each of these components through the\nuse of a dedicated client object.\n\n### Clients\nFour different clients are provided to interact with the various components of the Blob Service:\n1. [BlobServiceClient](https://aka.ms/azsdk-python-storage-blob-blobserviceclient) -\n this client represents interaction with the Azure storage account itself, and allows you to acquire preconfigured\n client instances to access the containers and blobs within. It provides operations to retrieve and configure the\n account properties as well as list, create, and delete containers within the account. To perform operations on a\n specific container or blob, retrieve a client using the `get_container_client` or `get_blob_client` methods.\n2. [ContainerClient](https://aka.ms/azsdk-python-storage-blob-containerclient) -\n this client represents interaction with a specific container (which need not exist yet), and allows you to acquire\n preconfigured client instances to access the blobs within. It provides operations to create, delete, or configure a\n container and includes operations to list, upload, and delete the blobs within it. To perform operations on a\n specific blob within the container, retrieve a client using the `get_blob_client` method.\n3. [BlobClient](https://aka.ms/azsdk-python-storage-blob-blobclient) -\n this client represents interaction with a specific blob (which need not exist yet). It provides operations to\n upload, download, delete, and create snapshots of a blob, as well as specific operations per blob type.\n4. [BlobLeaseClient](https://aka.ms/azsdk-python-storage-blob-blobleaseclient) -\n this client represents lease interactions with a `ContainerClient` or `BlobClient`. It provides operations to\n acquire, renew, release, change, and break a lease on a specified resource.\n\n### Async Clients \nThis library includes a complete async API supported on Python 3.5+. 
To use it, you must\nfirst install an async transport, such as [aiohttp](https://pypi.org/project/aiohttp/).\nSee\n[azure-core documentation](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md#transport)\nfor more information.\n\nAsync clients and credentials should be closed when they're no longer needed. These\nobjects are async context managers and define async `close` methods.\n\n### Blob Types\nOnce you've initialized a Client, you can choose from the different types of blobs:\n* [Block blobs](https://learn.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-block-blobs)\n store text and binary data, up to approximately 4.75 TiB. Block blobs are made up of blocks of data that can be\n managed individually\n* [Append blobs](https://learn.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-append-blobs)\n are made up of blocks like block blobs, but are optimized for append operations. Append blobs are ideal for scenarios\n such as logging data from virtual machines\n* [Page blobs](https://learn.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-page-blobs)\n store random access files up to 8 TiB in size. Page blobs store virtual hard drive (VHD) files and serve as disks for\n Azure virtual machines\n\n## Examples\nThe following sections provide several code snippets covering some of the most common Storage Blob tasks, including:\n\n* [Create a container](#create-a-container \"Create a container\")\n* [Uploading a blob](#uploading-a-blob \"Uploading a blob\")\n* [Downloading a blob](#downloading-a-blob \"Downloading a blob\")\n* [Enumerating blobs](#enumerating-blobs \"Enumerating blobs\")\n\nNote that a container must be created before to upload or download a blob.\n\n### Create a container\n\nCreate a container from where you can upload or download blobs.\n```python\nfrom azure.storage.blob import ContainerClient\n\ncontainer_client = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\ncontainer_client.create_container()\n```\n\nUse the async client to create a container\n\n```python\nfrom azure.storage.blob.aio import ContainerClient\n\ncontainer_client = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\nawait container_client.create_container()\n```\n\n### Uploading a blob\nUpload a blob to your container\n\n```python\nfrom azure.storage.blob import BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./SampleSource.txt\", \"rb\") as data:\n blob.upload_blob(data)\n```\n\nUse the async client to upload a blob\n\n```python\nfrom azure.storage.blob.aio import BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./SampleSource.txt\", \"rb\") as data:\n await blob.upload_blob(data)\n```\n\n### Downloading a blob\nDownload a blob from your container\n\n```python\nfrom azure.storage.blob import BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./BlockDestination.txt\", \"wb\") as my_blob:\n blob_data = blob.download_blob()\n blob_data.readinto(my_blob)\n```\n\nDownload a blob asynchronously\n\n```python\nfrom azure.storage.blob.aio import 
BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./BlockDestination.txt\", \"wb\") as my_blob:\n stream = await blob.download_blob()\n data = await stream.readall()\n my_blob.write(data)\n```\n\n### Enumerating blobs\nList the blobs in your container\n\n```python\nfrom azure.storage.blob import ContainerClient\n\ncontainer = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\nblob_list = container.list_blobs()\nfor blob in blob_list:\n print(blob.name + '\\n')\n```\n\nList the blobs asynchronously\n\n```python\nfrom azure.storage.blob.aio import ContainerClient\n\ncontainer = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\nblob_list = []\nasync for blob in container.list_blobs():\n blob_list.append(blob)\nprint(blob_list)\n```\n\n## Optional Configuration\n\nOptional keyword arguments that can be passed in at the client and per-operation level.\n\n### Retry Policy configuration\n\nUse the following keyword arguments when instantiating a client to configure the retry policy:\n\n* __retry_total__ (int): Total number of retries to allow. Takes precedence over other counts.\nPass in `retry_total=0` if you do not want to retry on requests. Defaults to 10.\n* __retry_connect__ (int): How many connection-related errors to retry on. Defaults to 3.\n* __retry_read__ (int): How many times to retry on read errors. Defaults to 3.\n* __retry_status__ (int): How many times to retry on bad status codes. Defaults to 3.\n* __retry_to_secondary__ (bool): Whether the request should be retried to secondary, if able.\nThis should only be enabled of RA-GRS accounts are used and potentially stale data can be handled.\nDefaults to `False`.\n\n### Encryption configuration\n\nUse the following keyword arguments when instantiating a client to configure encryption:\n\n* __require_encryption__ (bool): If set to True, will enforce that objects are encrypted and decrypt them.\n* __encryption_version__ (str): Specifies the version of encryption to use. Current options are `'2.0'` or `'1.0'` and\nthe default value is `'1.0'`. Version 1.0 is deprecated, and it is **highly recommended** to use version 2.0.\n* __key_encryption_key__ (object): The user-provided key-encryption-key. The instance must implement the following methods:\n - `wrap_key(key)`--wraps the specified key using an algorithm of the user's choice.\n - `get_key_wrap_algorithm()`--returns the algorithm used to wrap the specified symmetric key.\n - `get_kid()`--returns a string key id for this key-encryption-key.\n* __key_resolver_function__ (callable): The user-provided key resolver. Uses the kid string to return a key-encryption-key\nimplementing the interface defined above.\n\n### Other client / per-operation configuration\n\nOther optional configuration keyword arguments that can be specified on the client or per-operation.\n\n**Client keyword arguments:**\n\n* __connection_timeout__ (int): The number of seconds the client will wait to establish a connection to the server.\nDefaults to 20 seconds.\n* __read_timeout__ (int): The number of seconds the client will wait, between consecutive read operations, for a\nresponse from the server. This is a socket level timeout and is not affected by overall data size. Client-side read \ntimeouts will be automatically retried. 
Defaults to 60 seconds.\n* __transport__ (Any): User-provided transport to send the HTTP request.\n\n**Per-operation keyword arguments:**\n\n* __raw_response_hook__ (callable): The given callback uses the response returned from the service.\n* __raw_request_hook__ (callable): The given callback uses the request before being sent to service.\n* __client_request_id__ (str): Optional user specified identification of the request.\n* __user_agent__ (str): Appends the custom value to the user-agent header to be sent with the request.\n* __logging_enable__ (bool): Enables logging at the DEBUG level. Defaults to False. Can also be passed in at\nthe client level to enable it for all requests.\n* __logging_body__ (bool): Enables logging the request and response body. Defaults to False. Can also be passed in at\nthe client level to enable it for all requests.\n* __headers__ (dict): Pass in custom headers as key, value pairs. E.g. `headers={'CustomValue': value}`\n\n## Troubleshooting\n### General\nStorage Blob clients raise exceptions defined in [Azure Core](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/README.md).\n\nThis list can be used for reference to catch thrown exceptions. To get the specific error code of the exception, use the `error_code` attribute, i.e, `exception.error_code`.\n\n### Logging\nThis library uses the standard\n[logging](https://docs.python.org/3/library/logging.html) library for logging.\nBasic information about HTTP sessions (URLs, headers, etc.) is logged at INFO\nlevel.\n\nDetailed DEBUG level logging, including request/response bodies and unredacted\nheaders, can be enabled on a client with the `logging_enable` argument:\n```python\nimport sys\nimport logging\nfrom azure.storage.blob import BlobServiceClient\n\n# Create a logger for the 'azure.storage.blob' SDK\nlogger = logging.getLogger('azure.storage.blob')\nlogger.setLevel(logging.DEBUG)\n\n# Configure a console output\nhandler = logging.StreamHandler(stream=sys.stdout)\nlogger.addHandler(handler)\n\n# This client will log detailed information about its HTTP sessions, at DEBUG level\nservice_client = BlobServiceClient.from_connection_string(\"your_connection_string\", logging_enable=True)\n```\n\nSimilarly, `logging_enable` can enable detailed logging for a single operation,\neven when it isn't enabled for the client:\n```python\nservice_client.get_service_stats(logging_enable=True)\n```\n\n## Next steps\n\n### More sample code\n\nGet started with our [Blob samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples).\n\nSeveral Storage Blobs Python SDK samples are available to you in the SDK's GitHub repository. 
These samples provide example code for additional scenarios commonly encountered while working with Storage Blobs:\n\n* [blob_samples_container_access_policy.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy_async.py)) - Examples to set Access policies:\n * Set up Access Policy for container\n\n* [blob_samples_hello_world.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world_async.py)) - Examples for common Storage Blob tasks:\n * Set up a container\n * Create a block, page, or append blob\n * Upload blobs\n * Download blobs\n * Delete blobs\n\n* [blob_samples_authentication.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_authentication.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_authentication_async.py)) - Examples for authenticating and creating the client:\n * From a connection string\n * From a shared access key\n * From a shared access signature token\n * From active directory\n\n* [blob_samples_service.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_service.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_service_async.py)) - Examples for interacting with the blob service:\n * Get account information\n * Get and set service properties\n * Get service statistics\n * Create, list, and delete containers\n * Get the Blob or Container client\n\n* [blob_samples_containers.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_containers_async.py)) - Examples for interacting with containers:\n * Create a container and delete containers\n * Set metadata on containers\n * Get container properties\n * Acquire a lease on container\n * Set an access policy on a container\n * Upload, list, delete blobs in container\n * Get the blob client to interact with a specific blob\n\n* [blob_samples_common.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_common.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_common_async.py)) - Examples common to all types of blobs:\n * Create a snapshot\n * Delete a blob snapshot\n * Soft delete a blob\n * Undelete a blob\n * Acquire a lease on a blob\n * Copy a blob from a URL\n\n* [blob_samples_directory_interface.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface.py) - Examples for interfacing with Blob storage as if it were a directory on a filesystem:\n * Copy (upload or download) a single file or directory\n 
* List files or directories at a single level or recursively\n * Delete a single file or recursively delete a directory\n\n### Additional documentation\nFor more extensive documentation on Azure Blob storage, see the [Azure Blob storage documentation](https://learn.microsoft.com/azure/storage/blobs/) on learn.microsoft.com.\n\n## Contributing\nThis project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.microsoft.com.\n\nWhen you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA.\n\nThis project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.", - "release_date": "2025-03-27T17:13:06", + "release_date": "2025-07-16T21:34:09", "parties": [ { "type": "person", @@ -284,11 +284,11 @@ "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob", - "download_url": "https://files.pythonhosted.org/packages/57/33/085d9352d416e617993821b9d9488222fbb559bc15c3641d6cbd6d16d236/azure_storage_blob-12.25.1-py3-none-any.whl", - "size": 406990, + "download_url": "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", + "size": 412907, "sha1": null, - "md5": "20b5072c0d73c87cc0bd020da5c5f2f4", - "sha256": "1f337aab12e918ec3f1b638baada97550673911c4ceed892acc8e4e891b74167", + "md5": "b7ee3d0eec2bce8bbf60fc238d4349b7", + "sha256": "8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", "sha512": null, "bug_tracking_url": null, "code_view_url": null, @@ -308,20 +308,20 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/azure-storage-blob/12.25.1/json", + "api_data_url": "https://pypi.org/pypi/azure-storage-blob/12.26.0/json", "datasource_id": null, - "purl": "pkg:pypi/azure-storage-blob@12.25.1" + "purl": "pkg:pypi/azure-storage-blob@12.26.0" }, { "type": "pypi", "namespace": null, "name": "certifi", - "version": "2025.7.14", + "version": "2025.8.3", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "Python package for providing Mozilla's CA Bundle.\nCertifi: Python SSL Certificates\n================================\n\nCertifi provides Mozilla's carefully curated collection of Root Certificates for\nvalidating the trustworthiness of SSL certificates while verifying the identity\nof TLS hosts. It has been extracted from the `Requests`_ project.\n\nInstallation\n------------\n\n``certifi`` is available on PyPI. 
Simply install it with ``pip``::\n\n $ pip install certifi\n\nUsage\n-----\n\nTo reference the installed certificate authority (CA) bundle, you can use the\nbuilt-in function::\n\n >>> import certifi\n\n >>> certifi.where()\n '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'\n\nOr from the command line::\n\n $ python -m certifi\n /usr/local/lib/python3.7/site-packages/certifi/cacert.pem\n\nEnjoy!\n\n.. _`Requests`: https://requests.readthedocs.io/en/master/\n\nAddition/Removal of Certificates\n--------------------------------\n\nCertifi does not support any addition/removal or other modification of the\nCA trust store content. This project is intended to provide a reliable and\nhighly portable root of trust to python deployments. Look to upstream projects\nfor methods to use alternate trust.", - "release_date": "2025-07-14T03:29:26", + "release_date": "2025-08-03T03:07:45", "parties": [ { "type": "person", @@ -347,11 +347,11 @@ "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/certifi/python-certifi", - "download_url": "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", - "size": 162722, + "download_url": "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", + "size": 161216, "sha1": null, - "md5": "8561c6b29236cd268f57ddb4f22281d3", - "sha256": "6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", + "md5": "f9b6740cffcf397b47bc7fb7782b1354", + "sha256": "f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/certifi/python-certifi", @@ -371,26 +371,33 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/certifi/2025.7.14/json", + "api_data_url": "https://pypi.org/pypi/certifi/2025.8.3/json", "datasource_id": null, - "purl": "pkg:pypi/certifi@2025.7.14" + "purl": "pkg:pypi/certifi@2025.8.3" }, { "type": "pypi", "namespace": null, "name": "cffi", - "version": "1.17.1", + "version": "2.0.0", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "CFFI\n====\n\nForeign Function Interface for Python calling C code.\nPlease see the `Documentation `_.\n\nContact\n-------\n\n`Mailing list `_", - "release_date": "2024-09-04T20:44:38", + "description": "[![GitHub Actions Status](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml/badge.svg?branch=main)](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml?query=branch%3Amain++)\n[![PyPI version](https://img.shields.io/pypi/v/cffi.svg)](https://pypi.org/project/cffi)\n[![Read the Docs](https://img.shields.io/badge/docs-latest-blue.svg)][Documentation]\n\n\nCFFI\n====\n\nForeign Function Interface for Python calling C code.\n\nPlease see the [Documentation] or uncompiled in the `doc/` subdirectory.\n\nDownload\n--------\n\n[Download page](https://github.com/python-cffi/cffi/releases)\n\nSource Code\n-----------\n\nSource code is publicly available on\n[GitHub](https://github.com/python-cffi/cffi).\n\nContact\n-------\n\n[Mailing list](https://groups.google.com/forum/#!forum/python-cffi)\n\nTesting/development tips\n------------------------\n\nAfter `git clone` or `wget && tar`, we will get a directory called `cffi` or `cffi-x.x.x`. 
we call it `repo-directory`. To run tests under CPython, run the following in the `repo-directory`:\n\n pip install pytest\n pip install -e . # editable install of CFFI for local development\n pytest src/c/ testing/\n\n[Documentation]: http://cffi.readthedocs.org/", + "release_date": "2025-09-08T23:23:09", "parties": [ { "type": "person", "role": "author", "name": "Armin Rigo, Maciej Fijalkowski", - "email": "python-cffi@googlegroups.com", + "email": null, + "url": null + }, + { + "type": "person", + "role": "maintainer", + "name": "Matt Davis, Matt Clay, Matti Picus", + "email": null, "url": null } ], @@ -401,29 +408,24 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy" + "Programming Language :: Python :: Free Threading :: 2 - Beta", + "Programming Language :: Python :: Implementation :: CPython" ], - "homepage_url": "http://cffi.readthedocs.org", - "download_url": "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "size": 479447, + "homepage_url": null, + "download_url": "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", + "size": 219499, "sha1": null, - "md5": "58501ab3757c9ebf9815c7e121fc8593", - "sha256": "dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", + "md5": "46c48c8a80e0c29868c77150c85a4c48", + "sha256": "c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", "sha512": null, "bug_tracking_url": "https://github.com/python-cffi/cffi/issues", "code_view_url": "https://github.com/python-cffi/cffi", "vcs_url": null, "copyright": null, - "license_expression": null, - "declared_license": { - "license": "MIT", - "classifiers": [ - "License :: OSI Approved :: MIT License" - ] - }, + "license_expression": "MIT", + "declared_license": {}, "notice_text": null, "source_packages": [], "file_references": [], @@ -431,20 +433,20 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/cffi/1.17.1/json", + "api_data_url": "https://pypi.org/pypi/cffi/2.0.0/json", "datasource_id": null, - "purl": "pkg:pypi/cffi@1.17.1" + "purl": "pkg:pypi/cffi@2.0.0" }, { "type": "pypi", "namespace": null, "name": "charset-normalizer", - "version": "3.4.2", + "version": "3.4.3", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.\n

Charset Detection, for Everyone \ud83d\udc4b\n\nThe Real First Universal Charset Detector\n\nFeatured Packages\n\nIn other language (unofficial port - by the community)
\n\n> A library that helps you read text from an unknown charset encoding.
Motivated by `chardet`,\n> I'm trying to resolve the issue by taking a new approach.\n> All IANA character set names for which the Python core library provides codecs are supported.\n\n >>>>> \ud83d\udc49 Try Me Online Now, Then Adopt Me \ud83d\udc48 <<<<<\n
\n\nThis project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.\n\n| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |\n|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|\n| `Fast` | \u274c | \u2705 | \u2705 |\n| `Universal**` | \u274c | \u2705 | \u274c |\n| `Reliable` **without** distinguishable standards | \u274c | \u2705 | \u2705 |\n| `Reliable` **with** distinguishable standards | \u2705 | \u2705 | \u2705 |\n| `License` | LGPL-2.1
_restrictive_ | MIT | MPL-1.1
_restrictive_ |\n| `Native Python` | \u2705 | \u2705 | \u274c |\n| `Detect spoken language` | \u274c | \u2705 | N/A |\n| `UnicodeDecodeError Safety` | \u274c | \u2705 | \u274c |\n| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |\n| `Supported Encoding` | 33 | \ud83c\udf89 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |\n\n

\n\"Reading\"Cat\n

\n\n*\\*\\* : They are clearly using specific code for a specific encoding even if covering most of used one*
\n\n## \u26a1 Performance\n\nThis package offer better performance than its counterpart Chardet. Here are some numbers.\n\n| Package | Accuracy | Mean per file (ms) | File per sec (est) |\n|-----------------------------------------------|:--------:|:------------------:|:------------------:|\n| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |\n| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |\n\n| Package | 99th percentile | 95th percentile | 50th percentile |\n|-----------------------------------------------|:---------------:|:---------------:|:---------------:|\n| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |\n| charset-normalizer | 100 ms | 50 ms | 5 ms |\n\n_updated as of december 2024 using CPython 3.12_\n\nChardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload.\n\n> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.\n> And yes, these results might change at any time. The dataset can be updated to include more files.\n> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.\n> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability\n> (e.g. Supported Encoding) Challenge-them if you want.\n\n## \u2728 Installation\n\nUsing pip:\n\n```sh\npip install charset-normalizer -U\n```\n\n## \ud83d\ude80 Basic Usage\n\n### CLI\nThis package comes with a CLI.\n\n```\nusage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]\n file [file ...]\n\nThe Real First Universal Charset Detector. Discover originating encoding used\non text file. Normalize text to unicode.\n\npositional arguments:\n files File(s) to be analysed\n\noptional arguments:\n -h, --help show this help message and exit\n -v, --verbose Display complementary information about file if any.\n Stdout will contain logs about the detection process.\n -a, --with-alternative\n Output complementary possibilities if any. Top-level\n JSON WILL be a list.\n -n, --normalize Permit to normalize input file. If not set, program\n does not write anything.\n -m, --minimal Only output the charset detected to STDOUT. Disabling\n JSON output.\n -r, --replace Replace file when trying to normalize it instead of\n creating a new one.\n -f, --force Replace file without asking if you are sure, use this\n flag with caution.\n -t THRESHOLD, --threshold THRESHOLD\n Define a custom maximum amount of chaos allowed in\n decoded content. 0. 
<= chaos <= 1.\n --version Show version information and exit.\n```\n\n```bash\nnormalizer ./data/sample.1.fr.srt\n```\n\nor\n\n```bash\npython -m charset_normalizer ./data/sample.1.fr.srt\n```\n\n\ud83c\udf89 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format.\n\n```json\n{\n \"path\": \"/home/default/projects/charset_normalizer/data/sample.1.fr.srt\",\n \"encoding\": \"cp1252\",\n \"encoding_aliases\": [\n \"1252\",\n \"windows_1252\"\n ],\n \"alternative_encodings\": [\n \"cp1254\",\n \"cp1256\",\n \"cp1258\",\n \"iso8859_14\",\n \"iso8859_15\",\n \"iso8859_16\",\n \"iso8859_3\",\n \"iso8859_9\",\n \"latin_1\",\n \"mbcs\"\n ],\n \"language\": \"French\",\n \"alphabets\": [\n \"Basic Latin\",\n \"Latin-1 Supplement\"\n ],\n \"has_sig_or_bom\": false,\n \"chaos\": 0.149,\n \"coherence\": 97.152,\n \"unicode_path\": null,\n \"is_preferred\": true\n}\n```\n\n### Python\n*Just print out normalized text*\n```python\nfrom charset_normalizer import from_path\n\nresults = from_path('./my_subtitle.srt')\n\nprint(str(results.best()))\n```\n\n*Upgrade your code without effort*\n```python\nfrom charset_normalizer import detect\n```\n\nThe above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.\n\nSee the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)\n\n## \ud83d\ude07 Why\n\nWhen I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a\nreliable alternative using a completely different method. Also! I never back down on a good challenge!\n\nI **don't care** about the **originating charset** encoding, because **two different tables** can\nproduce **two identical rendered string.**\nWhat I want is to get readable text, the best I can.\n\nIn a way, **I'm brute forcing text decoding.** How cool is that ? \ud83d\ude0e\n\nDon't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.\n\n## \ud83c\udf70 How\n\n - Discard all charset encoding table that could not fit the binary content.\n - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.\n - Extract matches with the lowest mess detected.\n - Additionally, we measure coherence / probe for a language.\n\n**Wait a minute**, what is noise/mess and coherence according to **YOU ?**\n\n*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then\n**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).\n I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to\n improve or rewrite it.\n\n*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought\nthat intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.\n\n## \u26a1 Known limitations\n\n - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))\n - Every charset detector heavily depends on sufficient content. 
In common cases, do not bother run detection on very tiny content.\n\n## \u26a0\ufe0f About Python EOLs\n\n**If you are running:**\n\n- Python >=2.7,<3.5: Unsupported\n- Python 3.5: charset-normalizer < 2.1\n- Python 3.6: charset-normalizer < 3.1\n- Python 3.7: charset-normalizer < 4.0\n\nUpgrade your Python interpreter as soon as possible.\n\n## \ud83d\udc64 Contributing\n\nContributions, issues and feature requests are very much welcome.
\nFeel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.\n\n## \ud83d\udcdd License\n\nCopyright \u00a9 [Ahmed TAHRI @Ousret](https://github.com/Ousret).
\nThis project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.\n\nCharacters frequencies used in this project \u00a9 2012 [Denny Vrande\u010di\u0107](http://simia.net/letters/)\n\n## \ud83d\udcbc For Enterprise\n\nProfessional support for charset-normalizer is available as part of the [Tidelift\nSubscription][1]. Tidelift gives software development teams a single source for\npurchasing and maintaining their software, with professional grade assurances\nfrom the experts who know it best, while seamlessly integrating with existing\ntools.\n\n[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme\n\n[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297)\n\n# Changelog\nAll notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\n\n## [3.4.2](https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2) (2025-05-02)\n\n### Fixed\n- Addressed the DeprecationWarning in our CLI regarding `argparse.FileType` by backporting the target class into the package. (#591)\n- Improved the overall reliability of the detector with CJK Ideographs. (#605) (#587)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8\n\n## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24)\n\n### Changed\n- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend.\n- Enforce annotation delayed loading for a simpler and consistent types in the project.\n- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8\n\n### Added\n- pre-commit configuration.\n- noxfile.\n\n### Removed\n- `build-requirements.txt` as per using `pyproject.toml` native build configuration.\n- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile).\n- `setup.cfg` in favor of `pyproject.toml` metadata configuration.\n- Unused `utils.range_scan` function.\n\n### Fixed\n- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. (#572)\n- Deprecation warning \"'count' is passed as positional argument\" when converting to Unicode bytes on Python 3.13+\n\n## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)\n\n### Added\n- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints.\n- Support for Python 3.13 (#512)\n\n### Fixed\n- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.\n- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537)\n- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. 
(#381)\n\n## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)\n\n### Fixed\n- Unintentional memory usage regression when using large payload that match several encoding (#376)\n- Regression on some detection case showcased in the documentation (#371)\n\n### Added\n- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)\n\n## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8\n- Improved the general detection reliability based on reports from the community\n\n## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)\n\n### Added\n- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`\n- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)\n\n### Removed\n- (internal) Redundant utils.is_ascii function and unused function is_private_use_only\n- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant\n\n### Changed\n- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection\n- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8\n\n### Fixed\n- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \\_\\_lt\\_\\_ (#350)\n\n## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)\n\n### Changed\n- Typehint for function `from_path` no longer enforce `PathLike` as its first argument\n- Minor improvement over the global detection reliability\n\n### Added\n- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries\n- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)\n- Explicit support for Python 3.12\n\n### Fixed\n- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)\n\n## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)\n\n### Added\n- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)\n\n### Removed\n- Support for Python 3.6 (PR #260)\n\n### Changed\n- Optional speedup provided by mypy/c 1.0.1\n\n## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)\n\n### Fixed\n- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233)\n\n### Changed\n- Speedup provided by mypy/c 0.990 on Python >= 3.7\n\n## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Changed\n- Build 
with static metadata using 'build' frontend\n- Make the language detection stricter\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n- Sphinx warnings when generating the documentation\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n\n### Changed\n- Build with static metadata using 'build' frontend\n- Make the language detection stricter\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n\n## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)\n\n### Added\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Removed\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n\n### Fixed\n- Sphinx warnings when generating the documentation\n\n## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)\n\n### Changed\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Removed\n- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)\n\n### Deprecated\n- Function `normalize` scheduled for removal in 3.0\n\n### Changed\n- Removed useless call to decode in fn is_unprintable (#206)\n\n### Fixed\n- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) 
(#204)\n\n## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)\n\n### Added\n- Output the Unicode table version when running the CLI with `--version` (PR #194)\n\n### Changed\n- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)\n- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)\n\n### Fixed\n- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)\n- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)\n\n### Removed\n- Support for Python 3.5 (PR #192)\n\n### Deprecated\n- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)\n\n## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)\n\n### Fixed\n- ASCII miss-detection on rare cases (PR #170)\n\n## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)\n\n### Added\n- Explicit support for Python 3.11 (PR #164)\n\n### Changed\n- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)\n\n## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)\n\n### Fixed\n- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)\n\n### Changed\n- Skipping the language-detection (CD) on ASCII (PR #155)\n\n## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)\n\n### Changed\n- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)\n\n### Fixed\n- Wrong logging level applied when setting kwarg `explain` to True (PR #146)\n\n## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)\n### Changed\n- Improvement over Vietnamese detection (PR #126)\n- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)\n- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)\n- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)\n- Code style as refactored by Sourcery-AI (PR #131)\n- Minor adjustment on the MD around european words (PR #133)\n- Remove and replace SRTs from assets / tests (PR #139)\n- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135)\n\n### Fixed\n- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)\n- Avoid using too insignificant chunk (PR #137)\n\n### Added\n- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)\n\n## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)\n### Added\n- Add support for Kazakh (Cyrillic) language detection (PR #109)\n\n### Changed\n- Further, improve inferring the 
language from a given single-byte code page (PR #112)\n- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)\n- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)\n- Various detection improvement (MD+CD) (PR #117)\n\n### Removed\n- Remove redundant logging entry about detected language(s) (PR #115)\n\n### Fixed\n- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)\n\n## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)\n### Fixed\n- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)\n- Fix CLI crash when using --minimal output in certain cases (PR #103)\n\n### Changed\n- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)\n\n## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)\n### Changed\n- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)\n- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)\n- The Unicode detection is slightly improved (PR #93)\n- Add syntax sugar \\_\\_bool\\_\\_ for results CharsetMatches list-container (PR #91)\n\n### Removed\n- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)\n\n### Fixed\n- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)\n- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)\n- The MANIFEST.in was not exhaustive (PR #78)\n\n## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)\n### Fixed\n- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)\n- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)\n- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)\n- Submatch factoring could be wrong in rare edge cases (PR #72)\n- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72)\n- Fix line endings from CRLF to LF for certain project files (PR #67)\n\n### Changed\n- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)\n- Allow fallback on specified encoding if any (PR #71)\n\n## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)\n### Changed\n- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)\n- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)\n\n## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)\n### Fixed\n- Empty/Too small JSON payload miss-detection fixed. 
Report from [@tseaver](https://github.com/tseaver) (PR #59)\n\n### Changed\n- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)\n\n## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)\n### Fixed\n- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)\n- Using explain=False permanently disable the verbose output in the current runtime (PR #47)\n- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47)\n- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)\n\n### Changed\n- Public function normalize default args values were not aligned with from_bytes (PR #53)\n\n### Added\n- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)\n\n## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)\n### Changed\n- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.\n- Accent has been made on UTF-8 detection, should perform rather instantaneous.\n- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.\n- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)\n- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+\n- utf_7 detection has been reinstated.\n\n### Removed\n- This package no longer require anything when used with Python 3.5 (Dropped cached_property)\n- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volap\u00fck, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.\n- The exception hook on UnicodeDecodeError has been removed.\n\n### Deprecated\n- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0\n\n### Fixed\n- The CLI output used the relative path of the file(s). Should be absolute.\n\n## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)\n### Fixed\n- Logger configuration/usage no longer conflict with others (PR #44)\n\n## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)\n### Removed\n- Using standard logging instead of using the package loguru.\n- Dropping nose test framework in favor of the maintained pytest.\n- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.\n- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.\n- Stop support for UTF-7 that does not contain a SIG.\n- Dropping PrettyTable, replaced with pure JSON output in CLI.\n\n### Fixed\n- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process.\n- Not searching properly for the BOM when trying utf32/16 parent codec.\n\n### Changed\n- Improving the package final size by compressing frequencies.json.\n- Huge improvement over the larges payload.\n\n### Added\n- CLI now produces JSON consumable output.\n- Return ASCII if given sequences fit. 
Given reasonable confidence.\n\n## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)\n\n### Fixed\n- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)\n\n## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)\n\n### Fixed\n- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)\n\n## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)\n\n### Fixed\n- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)\n\n## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)\n\n### Changed\n- Amend the previous release to allow prettytable 2.0 (PR #35)\n\n## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)\n\n### Fixed\n- Fix error while using the package with a python pre-release interpreter (PR #33)\n\n### Changed\n- Dependencies refactoring, constraints revised.\n\n### Added\n- Add python 3.9 and 3.10 to the supported interpreters\n\nMIT License\n\nCopyright (c) 2025 TAHRI Ahmed R.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.", - "release_date": "2025-05-02T08:33:04", + "description": "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.\n

Charset Detection, for Everyone \ud83d\udc4b\n\nThe Real First Universal Charset Detector\n\nFeatured Packages\n\nIn other language (unofficial port - by the community)

\n\n> A library that helps you read text from an unknown charset encoding.
Motivated by `chardet`,\n> I'm trying to resolve the issue by taking a new approach.\n> All IANA character set names for which the Python core library provides codecs are supported.\n\n

\n >>>>> \ud83d\udc49 Try Me Online Now, Then Adopt Me \ud83d\udc48 <<<<<\n

\n\nThis project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.\n\n| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |\n|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|\n| `Fast` | \u274c | \u2705 | \u2705 |\n| `Universal**` | \u274c | \u2705 | \u274c |\n| `Reliable` **without** distinguishable standards | \u274c | \u2705 | \u2705 |\n| `Reliable` **with** distinguishable standards | \u2705 | \u2705 | \u2705 |\n| `License` | LGPL-2.1
_restrictive_ | MIT | MPL-1.1
_restrictive_ |\n| `Native Python` | \u2705 | \u2705 | \u274c |\n| `Detect spoken language` | \u274c | \u2705 | N/A |\n| `UnicodeDecodeError Safety` | \u274c | \u2705 | \u274c |\n| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |\n| `Supported Encoding` | 33 | \ud83c\udf89 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |\n\n

\n\"Reading\"Cat\n

\n\n*\\*\\* : They are clearly using specific code for a specific encoding even if covering most of used one*
\n\n## \u26a1 Performance\n\nThis package offer better performance than its counterpart Chardet. Here are some numbers.\n\n| Package | Accuracy | Mean per file (ms) | File per sec (est) |\n|-----------------------------------------------|:--------:|:------------------:|:------------------:|\n| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |\n| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |\n\n| Package | 99th percentile | 95th percentile | 50th percentile |\n|-----------------------------------------------|:---------------:|:---------------:|:---------------:|\n| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |\n| charset-normalizer | 100 ms | 50 ms | 5 ms |\n\n_updated as of december 2024 using CPython 3.12_\n\nChardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload.\n\n> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.\n> And yes, these results might change at any time. The dataset can be updated to include more files.\n> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.\n> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability\n> (e.g. Supported Encoding) Challenge-them if you want.\n\n## \u2728 Installation\n\nUsing pip:\n\n```sh\npip install charset-normalizer -U\n```\n\n## \ud83d\ude80 Basic Usage\n\n### CLI\nThis package comes with a CLI.\n\n```\nusage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]\n file [file ...]\n\nThe Real First Universal Charset Detector. Discover originating encoding used\non text file. Normalize text to unicode.\n\npositional arguments:\n files File(s) to be analysed\n\noptional arguments:\n -h, --help show this help message and exit\n -v, --verbose Display complementary information about file if any.\n Stdout will contain logs about the detection process.\n -a, --with-alternative\n Output complementary possibilities if any. Top-level\n JSON WILL be a list.\n -n, --normalize Permit to normalize input file. If not set, program\n does not write anything.\n -m, --minimal Only output the charset detected to STDOUT. Disabling\n JSON output.\n -r, --replace Replace file when trying to normalize it instead of\n creating a new one.\n -f, --force Replace file without asking if you are sure, use this\n flag with caution.\n -t THRESHOLD, --threshold THRESHOLD\n Define a custom maximum amount of chaos allowed in\n decoded content. 0. 
<= chaos <= 1.\n --version Show version information and exit.\n```\n\n```bash\nnormalizer ./data/sample.1.fr.srt\n```\n\nor\n\n```bash\npython -m charset_normalizer ./data/sample.1.fr.srt\n```\n\n\ud83c\udf89 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format.\n\n```json\n{\n \"path\": \"/home/default/projects/charset_normalizer/data/sample.1.fr.srt\",\n \"encoding\": \"cp1252\",\n \"encoding_aliases\": [\n \"1252\",\n \"windows_1252\"\n ],\n \"alternative_encodings\": [\n \"cp1254\",\n \"cp1256\",\n \"cp1258\",\n \"iso8859_14\",\n \"iso8859_15\",\n \"iso8859_16\",\n \"iso8859_3\",\n \"iso8859_9\",\n \"latin_1\",\n \"mbcs\"\n ],\n \"language\": \"French\",\n \"alphabets\": [\n \"Basic Latin\",\n \"Latin-1 Supplement\"\n ],\n \"has_sig_or_bom\": false,\n \"chaos\": 0.149,\n \"coherence\": 97.152,\n \"unicode_path\": null,\n \"is_preferred\": true\n}\n```\n\n### Python\n*Just print out normalized text*\n```python\nfrom charset_normalizer import from_path\n\nresults = from_path('./my_subtitle.srt')\n\nprint(str(results.best()))\n```\n\n*Upgrade your code without effort*\n```python\nfrom charset_normalizer import detect\n```\n\nThe above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.\n\nSee the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)\n\n## \ud83d\ude07 Why\n\nWhen I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a\nreliable alternative using a completely different method. Also! I never back down on a good challenge!\n\nI **don't care** about the **originating charset** encoding, because **two different tables** can\nproduce **two identical rendered string.**\nWhat I want is to get readable text, the best I can.\n\nIn a way, **I'm brute forcing text decoding.** How cool is that ? \ud83d\ude0e\n\nDon't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.\n\n## \ud83c\udf70 How\n\n - Discard all charset encoding table that could not fit the binary content.\n - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.\n - Extract matches with the lowest mess detected.\n - Additionally, we measure coherence / probe for a language.\n\n**Wait a minute**, what is noise/mess and coherence according to **YOU ?**\n\n*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then\n**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).\n I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to\n improve or rewrite it.\n\n*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought\nthat intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.\n\n## \u26a1 Known limitations\n\n - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))\n - Every charset detector heavily depends on sufficient content. 
In common cases, do not bother run detection on very tiny content.\n\n## \u26a0\ufe0f About Python EOLs\n\n**If you are running:**\n\n- Python >=2.7,<3.5: Unsupported\n- Python 3.5: charset-normalizer < 2.1\n- Python 3.6: charset-normalizer < 3.1\n- Python 3.7: charset-normalizer < 4.0\n\nUpgrade your Python interpreter as soon as possible.\n\n## \ud83d\udc64 Contributing\n\nContributions, issues and feature requests are very much welcome.
\nFeel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.\n\n## \ud83d\udcdd License\n\nCopyright \u00a9 [Ahmed TAHRI @Ousret](https://github.com/Ousret).
\nThis project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.\n\nCharacters frequencies used in this project \u00a9 2012 [Denny Vrande\u010di\u0107](http://simia.net/letters/)\n\n## \ud83d\udcbc For Enterprise\n\nProfessional support for charset-normalizer is available as part of the [Tidelift\nSubscription][1]. Tidelift gives software development teams a single source for\npurchasing and maintaining their software, with professional grade assurances\nfrom the experts who know it best, while seamlessly integrating with existing\ntools.\n\n[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme\n\n[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297)\n\n# Changelog\nAll notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\n\n## [3.4.3](https://github.com/Ousret/charset_normalizer/compare/3.4.2...3.4.3) (2025-08-09)\n\n### Changed\n- mypy(c) is no longer a required dependency at build time if `CHARSET_NORMALIZER_USE_MYPYC` isn't set to `1`. (#595) (#583)\n- automatically lower confidence on small bytes samples that are not Unicode in `detect` output legacy function. (#391)\n\n### Added\n- Custom build backend to overcome inability to mark mypy as an optional dependency in the build phase.\n- Support for Python 3.14\n\n### Fixed\n- sdist archive contained useless directories.\n- automatically fallback on valid UTF-16 or UTF-32 even if the md says it's noisy. (#633)\n\n### Misc\n- SBOM are automatically published to the relevant GitHub release to comply with regulatory changes.\n Each published wheel comes with its SBOM. We choose CycloneDX as the format.\n- Prebuilt optimized wheel are no longer distributed by default for CPython 3.7 due to a change in cibuildwheel.\n\n## [3.4.2](https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2) (2025-05-02)\n\n### Fixed\n- Addressed the DeprecationWarning in our CLI regarding `argparse.FileType` by backporting the target class into the package. (#591)\n- Improved the overall reliability of the detector with CJK Ideographs. (#605) (#587)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8\n\n## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24)\n\n### Changed\n- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend.\n- Enforce annotation delayed loading for a simpler and consistent types in the project.\n- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8\n\n### Added\n- pre-commit configuration.\n- noxfile.\n\n### Removed\n- `build-requirements.txt` as per using `pyproject.toml` native build configuration.\n- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile).\n- `setup.cfg` in favor of `pyproject.toml` metadata configuration.\n- Unused `utils.range_scan` function.\n\n### Fixed\n- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. 
(#572)\n- Deprecation warning \"'count' is passed as positional argument\" when converting to Unicode bytes on Python 3.13+\n\n## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)\n\n### Added\n- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints.\n- Support for Python 3.13 (#512)\n\n### Fixed\n- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.\n- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537)\n- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. (#381)\n\n## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)\n\n### Fixed\n- Unintentional memory usage regression when using large payload that match several encoding (#376)\n- Regression on some detection case showcased in the documentation (#371)\n\n### Added\n- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)\n\n## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8\n- Improved the general detection reliability based on reports from the community\n\n## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)\n\n### Added\n- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`\n- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)\n\n### Removed\n- (internal) Redundant utils.is_ascii function and unused function is_private_use_only\n- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant\n\n### Changed\n- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection\n- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8\n\n### Fixed\n- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \\_\\_lt\\_\\_ (#350)\n\n## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)\n\n### Changed\n- Typehint for function `from_path` no longer enforce `PathLike` as its first argument\n- Minor improvement over the global detection reliability\n\n### Added\n- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries\n- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)\n- Explicit support for Python 3.12\n\n### Fixed\n- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)\n\n## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)\n\n### Added\n- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)\n\n### Removed\n- Support for Python 3.6 (PR #260)\n\n### Changed\n- Optional speedup provided by mypy/c 1.0.1\n\n## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)\n\n### Fixed\n- Multi-bytes cutter/chunk generator did not always cut 
correctly (PR #233)\n\n### Changed\n- Speedup provided by mypy/c 0.990 on Python >= 3.7\n\n## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Changed\n- Build with static metadata using 'build' frontend\n- Make the language detection stricter\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n- Sphinx warnings when generating the documentation\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n\n### Changed\n- Build with static metadata using 'build' frontend\n- Make the language detection stricter\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n\n## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)\n\n### Added\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Removed\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n\n### Fixed\n- Sphinx warnings when generating the documentation\n\n## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)\n\n### Changed\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Removed\n- Breaking: Class aliases 
CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)\n\n### Deprecated\n- Function `normalize` scheduled for removal in 3.0\n\n### Changed\n- Removed useless call to decode in fn is_unprintable (#206)\n\n### Fixed\n- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204)\n\n## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)\n\n### Added\n- Output the Unicode table version when running the CLI with `--version` (PR #194)\n\n### Changed\n- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)\n- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)\n\n### Fixed\n- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)\n- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)\n\n### Removed\n- Support for Python 3.5 (PR #192)\n\n### Deprecated\n- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)\n\n## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)\n\n### Fixed\n- ASCII miss-detection on rare cases (PR #170)\n\n## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)\n\n### Added\n- Explicit support for Python 3.11 (PR #164)\n\n### Changed\n- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)\n\n## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)\n\n### Fixed\n- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)\n\n### Changed\n- Skipping the language-detection (CD) on ASCII (PR #155)\n\n## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)\n\n### Changed\n- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)\n\n### Fixed\n- Wrong logging level applied when setting kwarg `explain` to True (PR #146)\n\n## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)\n### Changed\n- Improvement over Vietnamese detection (PR #126)\n- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)\n- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)\n- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)\n- Code style as refactored by Sourcery-AI (PR #131)\n- Minor adjustment on the MD around european words (PR #133)\n- Remove and replace SRTs from assets / tests (PR #139)\n- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Setting kwarg `explain` to True will add provisionally 
(bounded to function lifespan) a specific stream handler (PR #135)\n\n### Fixed\n- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)\n- Avoid using too insignificant chunk (PR #137)\n\n### Added\n- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)\n\n## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)\n### Added\n- Add support for Kazakh (Cyrillic) language detection (PR #109)\n\n### Changed\n- Further, improve inferring the language from a given single-byte code page (PR #112)\n- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)\n- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)\n- Various detection improvement (MD+CD) (PR #117)\n\n### Removed\n- Remove redundant logging entry about detected language(s) (PR #115)\n\n### Fixed\n- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)\n\n## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)\n### Fixed\n- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)\n- Fix CLI crash when using --minimal output in certain cases (PR #103)\n\n### Changed\n- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)\n\n## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)\n### Changed\n- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)\n- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)\n- The Unicode detection is slightly improved (PR #93)\n- Add syntax sugar \\_\\_bool\\_\\_ for results CharsetMatches list-container (PR #91)\n\n### Removed\n- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)\n\n### Fixed\n- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)\n- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)\n- The MANIFEST.in was not exhaustive (PR #78)\n\n## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)\n### Fixed\n- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)\n- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)\n- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)\n- Submatch factoring could be wrong in rare edge cases (PR #72)\n- Multiple files given to the CLI were ignored when publishing results to STDOUT. 
(After the first path) (PR #72)\n- Fix line endings from CRLF to LF for certain project files (PR #67)\n\n### Changed\n- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)\n- Allow fallback on specified encoding if any (PR #71)\n\n## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)\n### Changed\n- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)\n- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)\n\n## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)\n### Fixed\n- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)\n\n### Changed\n- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)\n\n## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)\n### Fixed\n- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)\n- Using explain=False permanently disable the verbose output in the current runtime (PR #47)\n- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47)\n- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)\n\n### Changed\n- Public function normalize default args values were not aligned with from_bytes (PR #53)\n\n### Added\n- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)\n\n## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)\n### Changed\n- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.\n- Accent has been made on UTF-8 detection, should perform rather instantaneous.\n- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.\n- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)\n- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+\n- utf_7 detection has been reinstated.\n\n### Removed\n- This package no longer require anything when used with Python 3.5 (Dropped cached_property)\n- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volap\u00fck, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.\n- The exception hook on UnicodeDecodeError has been removed.\n\n### Deprecated\n- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0\n\n### Fixed\n- The CLI output used the relative path of the file(s). 
Should be absolute.\n\n## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)\n### Fixed\n- Logger configuration/usage no longer conflict with others (PR #44)\n\n## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)\n### Removed\n- Using standard logging instead of using the package loguru.\n- Dropping nose test framework in favor of the maintained pytest.\n- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.\n- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.\n- Stop support for UTF-7 that does not contain a SIG.\n- Dropping PrettyTable, replaced with pure JSON output in CLI.\n\n### Fixed\n- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process.\n- Not searching properly for the BOM when trying utf32/16 parent codec.\n\n### Changed\n- Improving the package final size by compressing frequencies.json.\n- Huge improvement over the larges payload.\n\n### Added\n- CLI now produces JSON consumable output.\n- Return ASCII if given sequences fit. Given reasonable confidence.\n\n## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)\n\n### Fixed\n- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)\n\n## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)\n\n### Fixed\n- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)\n\n## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)\n\n### Fixed\n- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)\n\n## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)\n\n### Changed\n- Amend the previous release to allow prettytable 2.0 (PR #35)\n\n## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)\n\n### Fixed\n- Fix error while using the package with a python pre-release interpreter (PR #33)\n\n### Changed\n- Dependencies refactoring, constraints revised.\n\n### Added\n- Add python 3.9 and 3.10 to the supported interpreters\n\nMIT License\n\nCopyright (c) 2025 TAHRI Ahmed R.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.", + "release_date": "2025-08-09T07:57:26", "parties": [ { "type": "person", @@ -480,6 +482,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -490,11 +493,11 @@ "Typing :: Typed" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "size": 148243, + "download_url": "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", + "size": 53175, "sha1": null, - "md5": "33e977b54a1ee45dbead0da58594fa8f", - "sha256": "6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", + "md5": "4a43811bb5747201dc3694e76763e446", + "sha256": "ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/jawah/charset_normalizer", @@ -502,10 +505,7 @@ "copyright": null, "license_expression": null, "declared_license": { - "license": "MIT", - "classifiers": [ - "License :: OSI Approved :: MIT License" - ] + "license": "MIT" }, "notice_text": null, "source_packages": [], @@ -514,20 +514,20 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/charset-normalizer/3.4.2/json", + "api_data_url": "https://pypi.org/pypi/charset-normalizer/3.4.3/json", "datasource_id": null, - "purl": "pkg:pypi/charset-normalizer@3.4.2" + "purl": "pkg:pypi/charset-normalizer@3.4.3" }, { "type": "pypi", "namespace": null, "name": "click", - "version": "8.2.1", + "version": "8.2.2", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Composable command line interface toolkit\n# $ click_\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". 
It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", - "release_date": "2025-05-20T23:19:47", + "description": "Composable command line interface toolkit\n
\"\"
\n\n# Click\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", + "release_date": "2025-08-02T02:23:39", "parties": [ { "type": "person", @@ -545,11 +545,11 @@ "Typing :: Typed" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", - "size": 102215, + "download_url": "https://files.pythonhosted.org/packages/ec/85/e7297e34133ae1cfde3bffd30c24e1ef055248251baa877834e048687a28/click-8.2.2-py3-none-any.whl", + "size": 103900, "sha1": null, - "md5": "aeead16d8bed93caa7107ac87b1e5ec8", - "sha256": "61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", + "md5": "7d180e1baded1a50d5ad31b43a965888", + "sha256": "52e1e9f5d3db8c85aa76968c7c67ed41ddbacb167f43201511c8fd61eb5ba2ca", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/pallets/click/", @@ -564,25 +564,25 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/click/8.2.1/json", + "api_data_url": "https://pypi.org/pypi/click/8.2.2/json", "datasource_id": null, - "purl": "pkg:pypi/click@8.2.1" + "purl": "pkg:pypi/click@8.2.2" }, { "type": "pypi", "namespace": null, "name": "cryptography", - "version": "45.0.5", + "version": "46.0.1", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "cryptography is a package which provides cryptographic recipes and primitives to Python developers.\npyca/cryptography\n=================\n\n.. image:: https://img.shields.io/pypi/v/cryptography.svg\n :target: https://pypi.org/project/cryptography/\n :alt: Latest Version\n\n.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest\n :target: https://cryptography.io\n :alt: Latest Docs\n\n.. 
image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main\n :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain\n\n\n``cryptography`` is a package which provides cryptographic recipes and\nprimitives to Python developers. Our goal is for it to be your \"cryptographic\nstandard library\". It supports Python 3.7+ and PyPy3 7.3.11+.\n\n``cryptography`` includes both high level recipes and low level interfaces to\ncommon cryptographic algorithms such as symmetric ciphers, message digests, and\nkey derivation functions. For example, to encrypt something with\n``cryptography``'s high level symmetric encryption recipe:\n\n.. code-block:: pycon\n\n >>> from cryptography.fernet import Fernet\n >>> # Put this somewhere safe!\n >>> key = Fernet.generate_key()\n >>> f = Fernet(key)\n >>> token = f.encrypt(b\"A really secret message. Not for prying eyes.\")\n >>> token\n b'...'\n >>> f.decrypt(token)\n b'A really secret message. Not for prying eyes.'\n\nYou can find more information in the `documentation`_.\n\nYou can install ``cryptography`` with:\n\n.. code-block:: console\n\n $ pip install cryptography\n\nFor full details see `the installation documentation`_.\n\nDiscussion\n~~~~~~~~~~\n\nIf you run into bugs, you can file them in our `issue tracker`_.\n\nWe maintain a `cryptography-dev`_ mailing list for development discussion.\n\nYou can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get\ninvolved.\n\nSecurity\n~~~~~~~~\n\nNeed to report a security issue? Please consult our `security reporting`_\ndocumentation.\n\n\n.. _`documentation`: https://cryptography.io/\n.. _`the installation documentation`: https://cryptography.io/en/latest/installation/\n.. _`issue tracker`: https://github.com/pyca/cryptography/issues\n.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev\n.. _`security reporting`: https://cryptography.io/en/latest/security/", - "release_date": "2025-07-02T13:05:46", + "description": "cryptography is a package which provides cryptographic recipes and primitives to Python developers.\npyca/cryptography\n=================\n\n.. image:: https://img.shields.io/pypi/v/cryptography.svg\n :target: https://pypi.org/project/cryptography/\n :alt: Latest Version\n\n.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest\n :target: https://cryptography.io\n :alt: Latest Docs\n\n.. image:: https://github.com/pyca/cryptography/actions/workflows/ci.yml/badge.svg\n :target: https://github.com/pyca/cryptography/actions/workflows/ci.yml?query=branch%3Amain\n\n``cryptography`` is a package which provides cryptographic recipes and\nprimitives to Python developers. Our goal is for it to be your \"cryptographic\nstandard library\". It supports Python 3.8+ and PyPy3 7.3.11+.\n\n``cryptography`` includes both high level recipes and low level interfaces to\ncommon cryptographic algorithms such as symmetric ciphers, message digests, and\nkey derivation functions. For example, to encrypt something with\n``cryptography``'s high level symmetric encryption recipe:\n\n.. code-block:: pycon\n\n >>> from cryptography.fernet import Fernet\n >>> # Put this somewhere safe!\n >>> key = Fernet.generate_key()\n >>> f = Fernet(key)\n >>> token = f.encrypt(b\"A really secret message. Not for prying eyes.\")\n >>> token\n b'...'\n >>> f.decrypt(token)\n b'A really secret message. 
Not for prying eyes.'\n\nYou can find more information in the `documentation`_.\n\nYou can install ``cryptography`` with:\n\n.. code-block:: console\n\n $ pip install cryptography\n\nFor full details see `the installation documentation`_.\n\nDiscussion\n~~~~~~~~~~\n\nIf you run into bugs, you can file them in our `issue tracker`_.\n\nWe maintain a `cryptography-dev`_ mailing list for development discussion.\n\nYou can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get\ninvolved.\n\nSecurity\n~~~~~~~~\n\nNeed to report a security issue? Please consult our `security reporting`_\ndocumentation.\n\n\n.. _`documentation`: https://cryptography.io/\n.. _`the installation documentation`: https://cryptography.io/en/latest/installation/\n.. _`issue tracker`: https://github.com/pyca/cryptography/issues\n.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev\n.. _`security reporting`: https://cryptography.io/en/latest/security/", + "release_date": "2025-09-17T00:08:49", "parties": [ { "type": "person", "role": "author", - "name": "The cryptography developers ", + "name": null, "email": "The Python Cryptographic Authority and individual contributors ", "url": null } @@ -603,28 +603,27 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: Free Threading :: 3 - Stable", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Security :: Cryptography" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/f2/2e/e2dbd629481b499b14516eed933f3276eb3239f7cee2dcfa4ee6b44d4711/cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", - "size": 4554189, + "download_url": "https://files.pythonhosted.org/packages/e5/d3/de61ad5b52433b389afca0bc70f02a7a1f074651221f599ce368da0fe437/cryptography-46.0.1-cp311-abi3-manylinux_2_28_x86_64.whl", + "size": 4604234, "sha1": null, - "md5": "e60dd7bf09e038a4508efcef2fc28cd5", - "sha256": "7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a", + "md5": "b3c22ab264b0f5a2ffc43bd9978e19e5", + "sha256": "f7a24ea78de345cfa7f6a8d3bde8b242c7fac27f2bd78fa23474ca38dfaeeab9", "sha512": null, "bug_tracking_url": null, "code_view_url": null, "vcs_url": null, "copyright": null, - "license_expression": null, - "declared_license": { - "license": "Apache-2.0 OR BSD-3-Clause" - }, + "license_expression": "Apache-2.0 OR BSD-3-Clause", + "declared_license": {}, "notice_text": null, "source_packages": [], "file_references": [], @@ -632,9 +631,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/cryptography/45.0.5/json", + "api_data_url": "https://pypi.org/pypi/cryptography/46.0.1/json", "datasource_id": null, - "purl": "pkg:pypi/cryptography@45.0.5" + "purl": "pkg:pypi/cryptography@46.0.1" }, { "type": "pypi", @@ -908,12 +907,12 @@ "type": "pypi", "namespace": null, "name": "pycparser", - "version": "2.22", + "version": "2.23", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "C parser in Python\npycparser is a complete parser of the C language, written in\npure Python using the PLY parsing library.\nIt parses C code into an AST and can serve as a 
front-end for\nC compilers or analysis tools.", - "release_date": "2024-03-30T13:22:20", + "release_date": "2025-09-09T13:23:46", "parties": [ { "type": "person", @@ -936,15 +935,16 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/eliben/pycparser", - "download_url": "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", - "size": 117552, + "download_url": "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", + "size": 118140, "sha1": null, - "md5": "e9bf4a92f270e6482393bd716406ff85", - "sha256": "c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", + "md5": "961daf0e0910747590f8a0101322bcd3", + "sha256": "e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", "sha512": null, "bug_tracking_url": null, "code_view_url": null, @@ -964,9 +964,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/pycparser/2.22/json", + "api_data_url": "https://pypi.org/pypi/pycparser/2.23/json", "datasource_id": null, - "purl": "pkg:pypi/pycparser@2.22" + "purl": "pkg:pypi/pycparser@2.23" }, { "type": "pypi", @@ -1038,12 +1038,12 @@ "type": "pypi", "namespace": null, "name": "requests", - "version": "2.32.4", + "version": "2.32.5", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Python HTTP for Humans.\n# Requests\n\n**Requests** is a simple, yet elegant, HTTP library.\n\n```python\n>>> import requests\n>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))\n>>> r.status_code\n200\n>>> r.headers['content-type']\n'application/json; charset=utf8'\n>>> r.encoding\n'utf-8'\n>>> r.text\n'{\"authenticated\": true, ...'\n>>> r.json()\n{'authenticated': True, ...}\n```\n\nRequests allows you to send HTTP/1.1 requests extremely easily. There\u2019s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data \u2014 but nowadays, just use the `json` method!\n\nRequests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`\u2014 according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. 
You may certainly put your trust in this code.\n\n[![Downloads](https://static.pepy.tech/badge/requests/month)](https://pepy.tech/project/requests)\n[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)\n[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)\n\n## Installing Requests and Supported Versions\n\nRequests is available on PyPI:\n\n```console\n$ python -m pip install requests\n```\n\nRequests officially supports Python 3.8+.\n\n## Supported Features & Best\u2013Practices\n\nRequests is ready for the demands of building robust and reliable HTTP\u2013speaking applications, for the needs of today.\n\n- Keep-Alive & Connection Pooling\n- International Domains and URLs\n- Sessions with Cookie Persistence\n- Browser-style TLS/SSL Verification\n- Basic & Digest Authentication\n- Familiar `dict`\u2013like Cookies\n- Automatic Content Decompression and Decoding\n- Multi-part File Uploads\n- SOCKS Proxy Support\n- Connection Timeouts\n- Streaming Downloads\n- Automatic honoring of `.netrc`\n- Chunked HTTP Requests\n\n## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)\n\n[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io)\n\n## Cloning the repository\n\nWhen cloning the Requests repository, you may need to add the `-c\nfetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see\n[this issue](https://github.com/psf/requests/issues/2690) for more background):\n\n```shell\ngit clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git\n```\n\nYou can also apply this setting to your global Git config:\n\n```shell\ngit config --global fetch.fsck.badTimezone ignore\n```\n\n---\n\n[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf)", - "release_date": "2025-06-09T16:43:05", + "description": "Python HTTP for Humans.\n# Requests\n\n**Requests** is a simple, yet elegant, HTTP library.\n\n```python\n>>> import requests\n>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))\n>>> r.status_code\n200\n>>> r.headers['content-type']\n'application/json; charset=utf8'\n>>> r.encoding\n'utf-8'\n>>> r.text\n'{\"authenticated\": true, ...'\n>>> r.json()\n{'authenticated': True, ...}\n```\n\nRequests allows you to send HTTP/1.1 requests extremely easily. There\u2019s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data \u2014 but nowadays, just use the `json` method!\n\nRequests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`\u2014 according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. 
You may certainly put your trust in this code.\n\n[![Downloads](https://static.pepy.tech/badge/requests/month)](https://pepy.tech/project/requests)\n[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)\n[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)\n\n## Installing Requests and Supported Versions\n\nRequests is available on PyPI:\n\n```console\n$ python -m pip install requests\n```\n\nRequests officially supports Python 3.9+.\n\n## Supported Features & Best\u2013Practices\n\nRequests is ready for the demands of building robust and reliable HTTP\u2013speaking applications, for the needs of today.\n\n- Keep-Alive & Connection Pooling\n- International Domains and URLs\n- Sessions with Cookie Persistence\n- Browser-style TLS/SSL Verification\n- Basic & Digest Authentication\n- Familiar `dict`\u2013like Cookies\n- Automatic Content Decompression and Decoding\n- Multi-part File Uploads\n- SOCKS Proxy Support\n- Connection Timeouts\n- Streaming Downloads\n- Automatic honoring of `.netrc`\n- Chunked HTTP Requests\n\n## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)\n\n[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io)\n\n## Cloning the repository\n\nWhen cloning the Requests repository, you may need to add the `-c\nfetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit timestamp (see\n[this issue](https://github.com/psf/requests/issues/2690) for more background):\n\n```shell\ngit clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git\n```\n\nYou can also apply this setting to your global Git config:\n\n```shell\ngit config --global fetch.fsck.badTimezone ignore\n```\n\n---\n\n[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf)", + "release_date": "2025-08-18T20:46:00", "parties": [ { "type": "person", @@ -1066,7 +1066,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", @@ -1074,11 +1074,11 @@ "Topic :: Software Development :: Libraries" ], "homepage_url": "https://requests.readthedocs.io", - "download_url": "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", - "size": 64847, + "download_url": "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", + "size": 64738, "sha1": null, - "md5": "fa8fa331f951fbc5e62f3d3e683a77a4", - "sha256": "27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", + "md5": "bd126794a95616a0da6192b288f9bb88", + "sha256": "2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/psf/requests", @@ -1098,9 +1098,9 @@ "dependencies": [], "repository_homepage_url": null, 
"repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/requests/2.32.4/json", + "api_data_url": "https://pypi.org/pypi/requests/2.32.5/json", "datasource_id": null, - "purl": "pkg:pypi/requests@2.32.4" + "purl": "pkg:pypi/requests@2.32.5" }, { "type": "pypi", @@ -1162,12 +1162,12 @@ "type": "pypi", "namespace": null, "name": "typing-extensions", - "version": "4.14.1", + "version": "4.15.0", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Backported and Experimental Type Hints for Python 3.9+\n# Typing Extensions\n\n[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)\n\n[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) \u2013\n[PyPI](https://pypi.org/project/typing-extensions/)\n\n## Overview\n\nThe `typing_extensions` module serves two related purposes:\n\n- Enable use of new type system features on older Python versions. For example,\n `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows\n users on previous Python versions to use it too.\n- Enable experimentation with new type system PEPs before they are accepted and\n added to the `typing` module.\n\n`typing_extensions` is treated specially by static type checkers such as\nmypy and pyright. Objects defined in `typing_extensions` are treated the same\nway as equivalent forms in `typing`.\n\n`typing_extensions` uses\n[Semantic Versioning](https://semver.org/). The\nmajor version will be incremented only for backwards-incompatible changes.\nTherefore, it's safe to depend\non `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`,\nwhere `x.y` is the first version that includes all features you need.\n\n## Included items\n\nSee [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a\ncomplete listing of module contents.\n\n## Contributing\n\nSee [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)\nfor how to contribute to `typing_extensions`.", - "release_date": "2025-07-04T13:28:32", + "description": "Backported and Experimental Type Hints for Python 3.9+\n# Typing Extensions\n\n[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)\n\n[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) \u2013\n[PyPI](https://pypi.org/project/typing-extensions/)\n\n## Overview\n\nThe `typing_extensions` module serves two related purposes:\n\n- Enable use of new type system features on older Python versions. For example,\n `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows\n users on previous Python versions to use it too.\n- Enable experimentation with new type system PEPs before they are accepted and\n added to the `typing` module.\n\n`typing_extensions` is treated specially by static type checkers such as\nmypy and pyright. Objects defined in `typing_extensions` are treated the same\nway as equivalent forms in `typing`.\n\n`typing_extensions` uses\n[Semantic Versioning](https://semver.org/). The\nmajor version will be incremented only for backwards-incompatible changes.\nTherefore, it's safe to depend\non `typing_extensions` like this: `typing_extensions ~=x.y`,\nwhere `x.y` is the first version that includes all features you need.\n[This](https://packaging.python.org/en/latest/specifications/version-specifiers/#compatible-release)\nis equivalent to `typing_extensions >=x.y, <(x+1)`. 
Do not depend on `~= x.y.z`\nunless you really know what you're doing; that defeats the purpose of\nsemantic versioning.\n\n## Included items\n\nSee [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a\ncomplete listing of module contents.\n\n## Contributing\n\nSee [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)\nfor how to contribute to `typing_extensions`.", + "release_date": "2025-08-25T13:49:24", "parties": [ { "type": "person", @@ -1205,11 +1205,11 @@ "Topic :: Software Development" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", - "size": 43906, + "download_url": "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", + "size": 44614, "sha1": null, - "md5": "86905389dfed18c11e510c9e23147fcb", - "sha256": "d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", + "md5": "1394f56d85d87540f7907680572797e1", + "sha256": "f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", "sha512": null, "bug_tracking_url": "https://github.com/python/typing_extensions/issues", "code_view_url": null, @@ -1224,9 +1224,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/typing-extensions/4.14.1/json", + "api_data_url": "https://pypi.org/pypi/typing-extensions/4.15.0/json", "datasource_id": null, - "purl": "pkg:pypi/typing-extensions@4.14.1" + "purl": "pkg:pypi/typing-extensions@4.15.0" }, { "type": "pypi", @@ -1306,11 +1306,11 @@ ], "resolved_dependencies_graph": [ { - "package": "pkg:pypi/azure-core@1.35.0", + "package": "pkg:pypi/azure-core@1.35.1", "dependencies": [ - "pkg:pypi/requests@2.32.4", + "pkg:pypi/requests@2.32.5", "pkg:pypi/six@1.17.0", - "pkg:pypi/typing-extensions@4.14.1" + "pkg:pypi/typing-extensions@4.15.0" ] }, { @@ -1320,36 +1320,37 @@ ] }, { - "package": "pkg:pypi/azure-storage-blob@12.25.1", + "package": "pkg:pypi/azure-storage-blob@12.26.0", "dependencies": [ - "pkg:pypi/azure-core@1.35.0", - "pkg:pypi/cryptography@45.0.5", + "pkg:pypi/azure-core@1.35.1", + "pkg:pypi/cryptography@46.0.1", "pkg:pypi/isodate@0.7.2", - "pkg:pypi/typing-extensions@4.14.1" + "pkg:pypi/typing-extensions@4.15.0" ] }, { - "package": "pkg:pypi/certifi@2025.7.14", + "package": "pkg:pypi/certifi@2025.8.3", "dependencies": [] }, { - "package": "pkg:pypi/cffi@1.17.1", + "package": "pkg:pypi/cffi@2.0.0", "dependencies": [ - "pkg:pypi/pycparser@2.22" + "pkg:pypi/pycparser@2.23" ] }, { - "package": "pkg:pypi/charset-normalizer@3.4.2", + "package": "pkg:pypi/charset-normalizer@3.4.3", "dependencies": [] }, { - "package": "pkg:pypi/click@8.2.1", + "package": "pkg:pypi/click@8.2.2", "dependencies": [] }, { - "package": "pkg:pypi/cryptography@45.0.5", + "package": "pkg:pypi/cryptography@46.0.1", "dependencies": [ - "pkg:pypi/cffi@1.17.1" + "pkg:pypi/cffi@2.0.0", + "pkg:pypi/typing-extensions@4.15.0" ] }, { @@ -1363,11 +1364,11 @@ { "package": "pkg:pypi/msrest@0.7.1", "dependencies": [ - "pkg:pypi/azure-core@1.35.0", - "pkg:pypi/certifi@2025.7.14", + "pkg:pypi/azure-core@1.35.1", + "pkg:pypi/certifi@2025.8.3", "pkg:pypi/isodate@0.7.2", "pkg:pypi/requests-oauthlib@2.0.0", - "pkg:pypi/requests@2.32.4" + "pkg:pypi/requests@2.32.5" ] }, { @@ -1375,21 +1376,21 @@ "dependencies": [] }, { - "package": 
"pkg:pypi/pycparser@2.22", + "package": "pkg:pypi/pycparser@2.23", "dependencies": [] }, { "package": "pkg:pypi/requests-oauthlib@2.0.0", "dependencies": [ "pkg:pypi/oauthlib@3.3.1", - "pkg:pypi/requests@2.32.4" + "pkg:pypi/requests@2.32.5" ] }, { - "package": "pkg:pypi/requests@2.32.4", + "package": "pkg:pypi/requests@2.32.5", "dependencies": [ - "pkg:pypi/certifi@2025.7.14", - "pkg:pypi/charset-normalizer@3.4.2", + "pkg:pypi/certifi@2025.8.3", + "pkg:pypi/charset-normalizer@3.4.3", "pkg:pypi/idna@3.10", "pkg:pypi/urllib3@2.5.0" ] @@ -1399,7 +1400,7 @@ "dependencies": [] }, { - "package": "pkg:pypi/typing-extensions@4.14.1", + "package": "pkg:pypi/typing-extensions@4.15.0", "dependencies": [] }, { diff --git a/tests/data/azure-devops.req-38-expected.json b/tests/data/azure-devops.req-38-expected.json index ae717c49..67443fa0 100644 --- a/tests/data/azure-devops.req-38-expected.json +++ b/tests/data/azure-devops.req-38-expected.json @@ -256,12 +256,12 @@ "type": "pypi", "namespace": null, "name": "azure-storage-blob", - "version": "12.25.1", + "version": "12.26.0", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "Microsoft Azure Blob Storage Client Library for Python\n# Azure Storage Blobs client library for Python\nAzure Blob storage is Microsoft's object storage solution for the cloud. Blob storage is optimized for storing massive amounts of unstructured data, such as text or binary data.\n\nBlob storage is ideal for:\n\n* Serving images or documents directly to a browser\n* Storing files for distributed access\n* Streaming video and audio\n* Storing data for backup and restore, disaster recovery, and archiving\n* Storing data for analysis by an on-premises or Azure-hosted service\n\n[Source code](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/azure/storage/blob)\n| [Package (PyPI)](https://pypi.org/project/azure-storage-blob/)\n| [Package (Conda)](https://anaconda.org/microsoft/azure-storage/)\n| [API reference documentation](https://aka.ms/azsdk-python-storage-blob-ref)\n| [Product documentation](https://learn.microsoft.com/azure/storage/)\n| [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples)\n\n\n## Getting started\n\n### Prerequisites\n* Python 3.8 or later is required to use this package. 
For more details, please read our page on [Azure SDK for Python version support policy](https://github.com/Azure/azure-sdk-for-python/wiki/Azure-SDKs-Python-version-support-policy).\n* You must have an [Azure subscription](https://azure.microsoft.com/free/) and an\n[Azure storage account](https://learn.microsoft.com/azure/storage/common/storage-account-overview) to use this package.\n\n### Install the package\nInstall the Azure Storage Blobs client library for Python with [pip](https://pypi.org/project/pip/):\n\n```bash\npip install azure-storage-blob\n```\n\n### Create a storage account\nIf you wish to create a new storage account, you can use the\n[Azure Portal](https://learn.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-portal),\n[Azure PowerShell](https://learn.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-powershell),\nor [Azure CLI](https://learn.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-cli):\n\n```bash\n# Create a new resource group to hold the storage account -\n# if using an existing resource group, skip this step\naz group create --name my-resource-group --location westus2\n\n# Create the storage account\naz storage account create -n my-storage-account-name -g my-resource-group\n```\n\n### Create the client\nThe Azure Storage Blobs client library for Python allows you to interact with three types of resources: the storage\naccount itself, blob storage containers, and blobs. Interaction with these resources starts with an instance of a\n[client](#clients). To create a client object, you will need the storage account's blob service account URL and a\ncredential that allows you to access the storage account:\n\n```python\nfrom azure.storage.blob import BlobServiceClient\n\nservice = BlobServiceClient(account_url=\"https://.blob.core.windows.net/\", credential=credential)\n```\n\n#### Looking up the account URL\nYou can find the storage account's blob service URL using the\n[Azure Portal](https://learn.microsoft.com/azure/storage/common/storage-account-overview#storage-account-endpoints),\n[Azure PowerShell](https://learn.microsoft.com/powershell/module/az.storage/get-azstorageaccount),\nor [Azure CLI](https://learn.microsoft.com/cli/azure/storage/account?view=azure-cli-latest#az-storage-account-show):\n\n```bash\n# Get the blob service account url for the storage account\naz storage account show -n my-storage-account-name -g my-resource-group --query \"primaryEndpoints.blob\"\n```\n\n#### Types of credentials\nThe `credential` parameter may be provided in a number of different forms, depending on the type of\n[authorization](https://learn.microsoft.com/azure/storage/common/storage-auth) you wish to use:\n1. 
To use an [Azure Active Directory (AAD) token credential](https://learn.microsoft.com/azure/storage/common/storage-auth-aad),\n provide an instance of the desired credential type obtained from the\n [azure-identity](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#credentials) library.\n For example, [DefaultAzureCredential](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#defaultazurecredential)\n can be used to authenticate the client.\n\n This requires some initial setup:\n * [Install azure-identity](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#install-the-package)\n * [Register a new AAD application](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) and give permissions to access Azure Storage\n * [Grant access](https://learn.microsoft.com/azure/storage/common/storage-auth-aad-rbac-portal) to Azure Blob data with RBAC in the Azure Portal\n * Set the values of the client ID, tenant ID, and client secret of the AAD application as environment variables:\n AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET\n\n Use the returned token credential to authenticate the client:\n ```python\n from azure.identity import DefaultAzureCredential\n from azure.storage.blob import BlobServiceClient\n token_credential = DefaultAzureCredential()\n\n blob_service_client = BlobServiceClient(\n account_url=\"https://.blob.core.windows.net\",\n credential=token_credential\n )\n ```\n\n2. To use a [shared access signature (SAS) token](https://learn.microsoft.com/azure/storage/common/storage-sas-overview),\n provide the token as a string. If your account URL includes the SAS token, omit the credential parameter.\n You can generate a SAS token from the Azure Portal under \"Shared access signature\" or use one of the `generate_sas()`\n functions to create a sas token for the storage account, container, or blob:\n\n ```python\n from datetime import datetime, timedelta\n from azure.storage.blob import BlobServiceClient, generate_account_sas, ResourceTypes, AccountSasPermissions\n\n sas_token = generate_account_sas(\n account_name=\"\",\n account_key=\"\",\n resource_types=ResourceTypes(service=True),\n permission=AccountSasPermissions(read=True),\n expiry=datetime.utcnow() + timedelta(hours=1)\n )\n\n blob_service_client = BlobServiceClient(account_url=\"https://.blob.core.windows.net\", credential=sas_token)\n ```\n\n3. To use a storage account [shared key](https://learn.microsoft.com/rest/api/storageservices/authenticate-with-shared-key/)\n (aka account key or access key), provide the key as a string. This can be found in the Azure Portal under the \"Access Keys\"\n section or by running the following Azure CLI command:\n\n ```az storage account keys list -g MyResourceGroup -n MyStorageAccount```\n\n Use the key as the credential parameter to authenticate the client:\n ```python\n from azure.storage.blob import BlobServiceClient\n service = BlobServiceClient(account_url=\"https://.blob.core.windows.net\", credential=\"\")\n ```\n \n If you are using **customized url** (which means the url is not in this format `.blob.core.windows.net`),\n please instantiate the client using the credential below:\n ```python\n from azure.storage.blob import BlobServiceClient\n service = BlobServiceClient(account_url=\"https://.blob.core.windows.net\", \n credential={\"account_name\": \"\", \"account_key\":\"\"})\n ```\n\n4. 
To use [anonymous public read access](https://learn.microsoft.com/azure/storage/blobs/storage-manage-access-to-resources),\n simply omit the credential parameter.\n\n#### Creating the client from a connection string\nDepending on your use case and authorization method, you may prefer to initialize a client instance with a storage\nconnection string instead of providing the account URL and credential separately. To do this, pass the storage\nconnection string to the client's `from_connection_string` class method:\n\n```python\nfrom azure.storage.blob import BlobServiceClient\n\nconnection_string = \"DefaultEndpointsProtocol=https;AccountName=xxxx;AccountKey=xxxx;EndpointSuffix=core.windows.net\"\nservice = BlobServiceClient.from_connection_string(conn_str=connection_string)\n```\n\nThe connection string to your storage account can be found in the Azure Portal under the \"Access Keys\" section or by running the following CLI command:\n\n```bash\naz storage account show-connection-string -g MyResourceGroup -n MyStorageAccount\n```\n\n## Key concepts\nThe following components make up the Azure Blob Service:\n* The storage account itself\n* A container within the storage account\n* A blob within a container\n\nThe Azure Storage Blobs client library for Python allows you to interact with each of these components through the\nuse of a dedicated client object.\n\n### Clients\nFour different clients are provided to interact with the various components of the Blob Service:\n1. [BlobServiceClient](https://aka.ms/azsdk-python-storage-blob-blobserviceclient) -\n this client represents interaction with the Azure storage account itself, and allows you to acquire preconfigured\n client instances to access the containers and blobs within. It provides operations to retrieve and configure the\n account properties as well as list, create, and delete containers within the account. To perform operations on a\n specific container or blob, retrieve a client using the `get_container_client` or `get_blob_client` methods.\n2. [ContainerClient](https://aka.ms/azsdk-python-storage-blob-containerclient) -\n this client represents interaction with a specific container (which need not exist yet), and allows you to acquire\n preconfigured client instances to access the blobs within. It provides operations to create, delete, or configure a\n container and includes operations to list, upload, and delete the blobs within it. To perform operations on a\n specific blob within the container, retrieve a client using the `get_blob_client` method.\n3. [BlobClient](https://aka.ms/azsdk-python-storage-blob-blobclient) -\n this client represents interaction with a specific blob (which need not exist yet). It provides operations to\n upload, download, delete, and create snapshots of a blob, as well as specific operations per blob type.\n4. [BlobLeaseClient](https://aka.ms/azsdk-python-storage-blob-blobleaseclient) -\n this client represents lease interactions with a `ContainerClient` or `BlobClient`. It provides operations to\n acquire, renew, release, change, and break a lease on a specified resource.\n\n### Async Clients \nThis library includes a complete async API supported on Python 3.5+. 
To use it, you must\nfirst install an async transport, such as [aiohttp](https://pypi.org/project/aiohttp/).\nSee\n[azure-core documentation](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md#transport)\nfor more information.\n\nAsync clients and credentials should be closed when they're no longer needed. These\nobjects are async context managers and define async `close` methods.\n\n### Blob Types\nOnce you've initialized a Client, you can choose from the different types of blobs:\n* [Block blobs](https://learn.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-block-blobs)\n store text and binary data, up to approximately 4.75 TiB. Block blobs are made up of blocks of data that can be\n managed individually\n* [Append blobs](https://learn.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-append-blobs)\n are made up of blocks like block blobs, but are optimized for append operations. Append blobs are ideal for scenarios\n such as logging data from virtual machines\n* [Page blobs](https://learn.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-page-blobs)\n store random access files up to 8 TiB in size. Page blobs store virtual hard drive (VHD) files and serve as disks for\n Azure virtual machines\n\n## Examples\nThe following sections provide several code snippets covering some of the most common Storage Blob tasks, including:\n\n* [Create a container](#create-a-container \"Create a container\")\n* [Uploading a blob](#uploading-a-blob \"Uploading a blob\")\n* [Downloading a blob](#downloading-a-blob \"Downloading a blob\")\n* [Enumerating blobs](#enumerating-blobs \"Enumerating blobs\")\n\nNote that a container must be created before to upload or download a blob.\n\n### Create a container\n\nCreate a container from where you can upload or download blobs.\n```python\nfrom azure.storage.blob import ContainerClient\n\ncontainer_client = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\ncontainer_client.create_container()\n```\n\nUse the async client to create a container\n\n```python\nfrom azure.storage.blob.aio import ContainerClient\n\ncontainer_client = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\nawait container_client.create_container()\n```\n\n### Uploading a blob\nUpload a blob to your container\n\n```python\nfrom azure.storage.blob import BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./SampleSource.txt\", \"rb\") as data:\n blob.upload_blob(data)\n```\n\nUse the async client to upload a blob\n\n```python\nfrom azure.storage.blob.aio import BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./SampleSource.txt\", \"rb\") as data:\n await blob.upload_blob(data)\n```\n\n### Downloading a blob\nDownload a blob from your container\n\n```python\nfrom azure.storage.blob import BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./BlockDestination.txt\", \"wb\") as my_blob:\n blob_data = blob.download_blob()\n blob_data.readinto(my_blob)\n```\n\nDownload a blob asynchronously\n\n```python\nfrom azure.storage.blob.aio import 
BlobClient\n\nblob = BlobClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\", blob_name=\"my_blob\")\n\nwith open(\"./BlockDestination.txt\", \"wb\") as my_blob:\n stream = await blob.download_blob()\n data = await stream.readall()\n my_blob.write(data)\n```\n\n### Enumerating blobs\nList the blobs in your container\n\n```python\nfrom azure.storage.blob import ContainerClient\n\ncontainer = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\nblob_list = container.list_blobs()\nfor blob in blob_list:\n print(blob.name + '\\n')\n```\n\nList the blobs asynchronously\n\n```python\nfrom azure.storage.blob.aio import ContainerClient\n\ncontainer = ContainerClient.from_connection_string(conn_str=\"\", container_name=\"mycontainer\")\n\nblob_list = []\nasync for blob in container.list_blobs():\n blob_list.append(blob)\nprint(blob_list)\n```\n\n## Optional Configuration\n\nOptional keyword arguments that can be passed in at the client and per-operation level.\n\n### Retry Policy configuration\n\nUse the following keyword arguments when instantiating a client to configure the retry policy:\n\n* __retry_total__ (int): Total number of retries to allow. Takes precedence over other counts.\nPass in `retry_total=0` if you do not want to retry on requests. Defaults to 10.\n* __retry_connect__ (int): How many connection-related errors to retry on. Defaults to 3.\n* __retry_read__ (int): How many times to retry on read errors. Defaults to 3.\n* __retry_status__ (int): How many times to retry on bad status codes. Defaults to 3.\n* __retry_to_secondary__ (bool): Whether the request should be retried to secondary, if able.\nThis should only be enabled of RA-GRS accounts are used and potentially stale data can be handled.\nDefaults to `False`.\n\n### Encryption configuration\n\nUse the following keyword arguments when instantiating a client to configure encryption:\n\n* __require_encryption__ (bool): If set to True, will enforce that objects are encrypted and decrypt them.\n* __encryption_version__ (str): Specifies the version of encryption to use. Current options are `'2.0'` or `'1.0'` and\nthe default value is `'1.0'`. Version 1.0 is deprecated, and it is **highly recommended** to use version 2.0.\n* __key_encryption_key__ (object): The user-provided key-encryption-key. The instance must implement the following methods:\n - `wrap_key(key)`--wraps the specified key using an algorithm of the user's choice.\n - `get_key_wrap_algorithm()`--returns the algorithm used to wrap the specified symmetric key.\n - `get_kid()`--returns a string key id for this key-encryption-key.\n* __key_resolver_function__ (callable): The user-provided key resolver. Uses the kid string to return a key-encryption-key\nimplementing the interface defined above.\n\n### Other client / per-operation configuration\n\nOther optional configuration keyword arguments that can be specified on the client or per-operation.\n\n**Client keyword arguments:**\n\n* __connection_timeout__ (int): The number of seconds the client will wait to establish a connection to the server.\nDefaults to 20 seconds.\n* __read_timeout__ (int): The number of seconds the client will wait, between consecutive read operations, for a\nresponse from the server. This is a socket level timeout and is not affected by overall data size. Client-side read \ntimeouts will be automatically retried. 
Defaults to 60 seconds.\n* __transport__ (Any): User-provided transport to send the HTTP request.\n\n**Per-operation keyword arguments:**\n\n* __raw_response_hook__ (callable): The given callback uses the response returned from the service.\n* __raw_request_hook__ (callable): The given callback uses the request before being sent to service.\n* __client_request_id__ (str): Optional user specified identification of the request.\n* __user_agent__ (str): Appends the custom value to the user-agent header to be sent with the request.\n* __logging_enable__ (bool): Enables logging at the DEBUG level. Defaults to False. Can also be passed in at\nthe client level to enable it for all requests.\n* __logging_body__ (bool): Enables logging the request and response body. Defaults to False. Can also be passed in at\nthe client level to enable it for all requests.\n* __headers__ (dict): Pass in custom headers as key, value pairs. E.g. `headers={'CustomValue': value}`\n\n## Troubleshooting\n### General\nStorage Blob clients raise exceptions defined in [Azure Core](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/README.md).\n\nThis list can be used for reference to catch thrown exceptions. To get the specific error code of the exception, use the `error_code` attribute, i.e, `exception.error_code`.\n\n### Logging\nThis library uses the standard\n[logging](https://docs.python.org/3/library/logging.html) library for logging.\nBasic information about HTTP sessions (URLs, headers, etc.) is logged at INFO\nlevel.\n\nDetailed DEBUG level logging, including request/response bodies and unredacted\nheaders, can be enabled on a client with the `logging_enable` argument:\n```python\nimport sys\nimport logging\nfrom azure.storage.blob import BlobServiceClient\n\n# Create a logger for the 'azure.storage.blob' SDK\nlogger = logging.getLogger('azure.storage.blob')\nlogger.setLevel(logging.DEBUG)\n\n# Configure a console output\nhandler = logging.StreamHandler(stream=sys.stdout)\nlogger.addHandler(handler)\n\n# This client will log detailed information about its HTTP sessions, at DEBUG level\nservice_client = BlobServiceClient.from_connection_string(\"your_connection_string\", logging_enable=True)\n```\n\nSimilarly, `logging_enable` can enable detailed logging for a single operation,\neven when it isn't enabled for the client:\n```python\nservice_client.get_service_stats(logging_enable=True)\n```\n\n## Next steps\n\n### More sample code\n\nGet started with our [Blob samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples).\n\nSeveral Storage Blobs Python SDK samples are available to you in the SDK's GitHub repository. 
These samples provide example code for additional scenarios commonly encountered while working with Storage Blobs:\n\n* [blob_samples_container_access_policy.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy_async.py)) - Examples to set Access policies:\n * Set up Access Policy for container\n\n* [blob_samples_hello_world.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world_async.py)) - Examples for common Storage Blob tasks:\n * Set up a container\n * Create a block, page, or append blob\n * Upload blobs\n * Download blobs\n * Delete blobs\n\n* [blob_samples_authentication.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_authentication.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_authentication_async.py)) - Examples for authenticating and creating the client:\n * From a connection string\n * From a shared access key\n * From a shared access signature token\n * From active directory\n\n* [blob_samples_service.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_service.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_service_async.py)) - Examples for interacting with the blob service:\n * Get account information\n * Get and set service properties\n * Get service statistics\n * Create, list, and delete containers\n * Get the Blob or Container client\n\n* [blob_samples_containers.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_containers_async.py)) - Examples for interacting with containers:\n * Create a container and delete containers\n * Set metadata on containers\n * Get container properties\n * Acquire a lease on container\n * Set an access policy on a container\n * Upload, list, delete blobs in container\n * Get the blob client to interact with a specific blob\n\n* [blob_samples_common.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_common.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_common_async.py)) - Examples common to all types of blobs:\n * Create a snapshot\n * Delete a blob snapshot\n * Soft delete a blob\n * Undelete a blob\n * Acquire a lease on a blob\n * Copy a blob from a URL\n\n* [blob_samples_directory_interface.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface.py) - Examples for interfacing with Blob storage as if it were a directory on a filesystem:\n * Copy (upload or download) a single file or directory\n 
* List files or directories at a single level or recursively\n * Delete a single file or recursively delete a directory\n\n### Additional documentation\nFor more extensive documentation on Azure Blob storage, see the [Azure Blob storage documentation](https://learn.microsoft.com/azure/storage/blobs/) on learn.microsoft.com.\n\n## Contributing\nThis project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.microsoft.com.\n\nWhen you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA.\n\nThis project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.", - "release_date": "2025-03-27T17:13:06", + "release_date": "2025-07-16T21:34:09", "parties": [ { "type": "person", @@ -285,11 +285,11 @@ "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob", - "download_url": "https://files.pythonhosted.org/packages/57/33/085d9352d416e617993821b9d9488222fbb559bc15c3641d6cbd6d16d236/azure_storage_blob-12.25.1-py3-none-any.whl", - "size": 406990, + "download_url": "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", + "size": 412907, "sha1": null, - "md5": "20b5072c0d73c87cc0bd020da5c5f2f4", - "sha256": "1f337aab12e918ec3f1b638baada97550673911c4ceed892acc8e4e891b74167", + "md5": "b7ee3d0eec2bce8bbf60fc238d4349b7", + "sha256": "8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", "sha512": null, "bug_tracking_url": null, "code_view_url": null, @@ -309,20 +309,20 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/azure-storage-blob/12.25.1/json", + "api_data_url": "https://pypi.org/pypi/azure-storage-blob/12.26.0/json", "datasource_id": null, - "purl": "pkg:pypi/azure-storage-blob@12.25.1" + "purl": "pkg:pypi/azure-storage-blob@12.26.0" }, { "type": "pypi", "namespace": null, "name": "certifi", - "version": "2025.7.14", + "version": "2025.8.3", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "Python package for providing Mozilla's CA Bundle.\nCertifi: Python SSL Certificates\n================================\n\nCertifi provides Mozilla's carefully curated collection of Root Certificates for\nvalidating the trustworthiness of SSL certificates while verifying the identity\nof TLS hosts. It has been extracted from the `Requests`_ project.\n\nInstallation\n------------\n\n``certifi`` is available on PyPI. 
Simply install it with ``pip``::\n\n $ pip install certifi\n\nUsage\n-----\n\nTo reference the installed certificate authority (CA) bundle, you can use the\nbuilt-in function::\n\n >>> import certifi\n\n >>> certifi.where()\n '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'\n\nOr from the command line::\n\n $ python -m certifi\n /usr/local/lib/python3.7/site-packages/certifi/cacert.pem\n\nEnjoy!\n\n.. _`Requests`: https://requests.readthedocs.io/en/master/\n\nAddition/Removal of Certificates\n--------------------------------\n\nCertifi does not support any addition/removal or other modification of the\nCA trust store content. This project is intended to provide a reliable and\nhighly portable root of trust to python deployments. Look to upstream projects\nfor methods to use alternate trust.", - "release_date": "2025-07-14T03:29:26", + "release_date": "2025-08-03T03:07:45", "parties": [ { "type": "person", @@ -348,11 +348,11 @@ "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/certifi/python-certifi", - "download_url": "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", - "size": 162722, + "download_url": "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", + "size": 161216, "sha1": null, - "md5": "8561c6b29236cd268f57ddb4f22281d3", - "sha256": "6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", + "md5": "f9b6740cffcf397b47bc7fb7782b1354", + "sha256": "f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/certifi/python-certifi", @@ -372,9 +372,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/certifi/2025.7.14/json", + "api_data_url": "https://pypi.org/pypi/certifi/2025.8.3/json", "datasource_id": null, - "purl": "pkg:pypi/certifi@2025.7.14" + "purl": "pkg:pypi/certifi@2025.8.3" }, { "type": "pypi", @@ -440,12 +440,12 @@ "type": "pypi", "namespace": null, "name": "charset-normalizer", - "version": "3.4.2", + "version": "3.4.3", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.\n

Charset Detection, for Everyone \ud83d\udc4b\n\nThe Real First Universal Charset Detector\n\nFeatured Packages\n\nIn other language (unofficial port - by the community)\n\n> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,\n> I'm trying to resolve the issue by taking a new approach.\n> All IANA character set names for which the Python core library provides codecs are supported.\n\n>>>>> \ud83d\udc49 Try Me Online Now, Then Adopt Me \ud83d\udc48 <<<<<\n\nThis project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.\n\n| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |\n|---|:---:|:---:|:---:|\n| `Fast` | \u274c | \u2705 | \u2705 |\n| `Universal**` | \u274c | \u2705 | \u274c |\n| `Reliable` **without** distinguishable standards | \u274c | \u2705 | \u2705 |\n| `Reliable` **with** distinguishable standards | \u2705 | \u2705 | \u2705 |\n| `License` | LGPL-2.1 _restrictive_ | MIT | MPL-1.1 _restrictive_ |\n| `Native Python` | \u2705 | \u2705 | \u274c |\n| `Detect spoken language` | \u274c | \u2705 | N/A |\n| `UnicodeDecodeError Safety` | \u274c | \u2705 | \u274c |\n| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |\n| `Supported Encoding` | 33 | \ud83c\udf89 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |\n\n*\\*\\* : They are clearly using specific code for a specific encoding even if covering most of used one*
\n\n## \u26a1 Performance\n\nThis package offer better performance than its counterpart Chardet. Here are some numbers.\n\n| Package | Accuracy | Mean per file (ms) | File per sec (est) |\n|-----------------------------------------------|:--------:|:------------------:|:------------------:|\n| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |\n| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |\n\n| Package | 99th percentile | 95th percentile | 50th percentile |\n|-----------------------------------------------|:---------------:|:---------------:|:---------------:|\n| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |\n| charset-normalizer | 100 ms | 50 ms | 5 ms |\n\n_updated as of december 2024 using CPython 3.12_\n\nChardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload.\n\n> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.\n> And yes, these results might change at any time. The dataset can be updated to include more files.\n> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.\n> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability\n> (e.g. Supported Encoding) Challenge-them if you want.\n\n## \u2728 Installation\n\nUsing pip:\n\n```sh\npip install charset-normalizer -U\n```\n\n## \ud83d\ude80 Basic Usage\n\n### CLI\nThis package comes with a CLI.\n\n```\nusage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]\n file [file ...]\n\nThe Real First Universal Charset Detector. Discover originating encoding used\non text file. Normalize text to unicode.\n\npositional arguments:\n files File(s) to be analysed\n\noptional arguments:\n -h, --help show this help message and exit\n -v, --verbose Display complementary information about file if any.\n Stdout will contain logs about the detection process.\n -a, --with-alternative\n Output complementary possibilities if any. Top-level\n JSON WILL be a list.\n -n, --normalize Permit to normalize input file. If not set, program\n does not write anything.\n -m, --minimal Only output the charset detected to STDOUT. Disabling\n JSON output.\n -r, --replace Replace file when trying to normalize it instead of\n creating a new one.\n -f, --force Replace file without asking if you are sure, use this\n flag with caution.\n -t THRESHOLD, --threshold THRESHOLD\n Define a custom maximum amount of chaos allowed in\n decoded content. 0. 
<= chaos <= 1.\n --version Show version information and exit.\n```\n\n```bash\nnormalizer ./data/sample.1.fr.srt\n```\n\nor\n\n```bash\npython -m charset_normalizer ./data/sample.1.fr.srt\n```\n\n\ud83c\udf89 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format.\n\n```json\n{\n \"path\": \"/home/default/projects/charset_normalizer/data/sample.1.fr.srt\",\n \"encoding\": \"cp1252\",\n \"encoding_aliases\": [\n \"1252\",\n \"windows_1252\"\n ],\n \"alternative_encodings\": [\n \"cp1254\",\n \"cp1256\",\n \"cp1258\",\n \"iso8859_14\",\n \"iso8859_15\",\n \"iso8859_16\",\n \"iso8859_3\",\n \"iso8859_9\",\n \"latin_1\",\n \"mbcs\"\n ],\n \"language\": \"French\",\n \"alphabets\": [\n \"Basic Latin\",\n \"Latin-1 Supplement\"\n ],\n \"has_sig_or_bom\": false,\n \"chaos\": 0.149,\n \"coherence\": 97.152,\n \"unicode_path\": null,\n \"is_preferred\": true\n}\n```\n\n### Python\n*Just print out normalized text*\n```python\nfrom charset_normalizer import from_path\n\nresults = from_path('./my_subtitle.srt')\n\nprint(str(results.best()))\n```\n\n*Upgrade your code without effort*\n```python\nfrom charset_normalizer import detect\n```\n\nThe above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.\n\nSee the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)\n\n## \ud83d\ude07 Why\n\nWhen I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a\nreliable alternative using a completely different method. Also! I never back down on a good challenge!\n\nI **don't care** about the **originating charset** encoding, because **two different tables** can\nproduce **two identical rendered string.**\nWhat I want is to get readable text, the best I can.\n\nIn a way, **I'm brute forcing text decoding.** How cool is that ? \ud83d\ude0e\n\nDon't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.\n\n## \ud83c\udf70 How\n\n - Discard all charset encoding table that could not fit the binary content.\n - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.\n - Extract matches with the lowest mess detected.\n - Additionally, we measure coherence / probe for a language.\n\n**Wait a minute**, what is noise/mess and coherence according to **YOU ?**\n\n*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then\n**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).\n I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to\n improve or rewrite it.\n\n*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought\nthat intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.\n\n## \u26a1 Known limitations\n\n - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))\n - Every charset detector heavily depends on sufficient content. 
In common cases, do not bother run detection on very tiny content.\n\n## \u26a0\ufe0f About Python EOLs\n\n**If you are running:**\n\n- Python >=2.7,<3.5: Unsupported\n- Python 3.5: charset-normalizer < 2.1\n- Python 3.6: charset-normalizer < 3.1\n- Python 3.7: charset-normalizer < 4.0\n\nUpgrade your Python interpreter as soon as possible.\n\n## \ud83d\udc64 Contributing\n\nContributions, issues and feature requests are very much welcome.
\nFeel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.\n\n## \ud83d\udcdd License\n\nCopyright \u00a9 [Ahmed TAHRI @Ousret](https://github.com/Ousret).
\nThis project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.\n\nCharacters frequencies used in this project \u00a9 2012 [Denny Vrande\u010di\u0107](http://simia.net/letters/)\n\n## \ud83d\udcbc For Enterprise\n\nProfessional support for charset-normalizer is available as part of the [Tidelift\nSubscription][1]. Tidelift gives software development teams a single source for\npurchasing and maintaining their software, with professional grade assurances\nfrom the experts who know it best, while seamlessly integrating with existing\ntools.\n\n[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme\n\n[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297)\n\n# Changelog\nAll notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\n\n## [3.4.2](https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2) (2025-05-02)\n\n### Fixed\n- Addressed the DeprecationWarning in our CLI regarding `argparse.FileType` by backporting the target class into the package. (#591)\n- Improved the overall reliability of the detector with CJK Ideographs. (#605) (#587)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8\n\n## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24)\n\n### Changed\n- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend.\n- Enforce annotation delayed loading for a simpler and consistent types in the project.\n- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8\n\n### Added\n- pre-commit configuration.\n- noxfile.\n\n### Removed\n- `build-requirements.txt` as per using `pyproject.toml` native build configuration.\n- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile).\n- `setup.cfg` in favor of `pyproject.toml` metadata configuration.\n- Unused `utils.range_scan` function.\n\n### Fixed\n- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. (#572)\n- Deprecation warning \"'count' is passed as positional argument\" when converting to Unicode bytes on Python 3.13+\n\n## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)\n\n### Added\n- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints.\n- Support for Python 3.13 (#512)\n\n### Fixed\n- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.\n- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537)\n- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. 
(#381)\n\n## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)\n\n### Fixed\n- Unintentional memory usage regression when using large payload that match several encoding (#376)\n- Regression on some detection case showcased in the documentation (#371)\n\n### Added\n- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)\n\n## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8\n- Improved the general detection reliability based on reports from the community\n\n## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)\n\n### Added\n- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`\n- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)\n\n### Removed\n- (internal) Redundant utils.is_ascii function and unused function is_private_use_only\n- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant\n\n### Changed\n- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection\n- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8\n\n### Fixed\n- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \\_\\_lt\\_\\_ (#350)\n\n## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)\n\n### Changed\n- Typehint for function `from_path` no longer enforce `PathLike` as its first argument\n- Minor improvement over the global detection reliability\n\n### Added\n- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries\n- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)\n- Explicit support for Python 3.12\n\n### Fixed\n- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)\n\n## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)\n\n### Added\n- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)\n\n### Removed\n- Support for Python 3.6 (PR #260)\n\n### Changed\n- Optional speedup provided by mypy/c 1.0.1\n\n## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)\n\n### Fixed\n- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233)\n\n### Changed\n- Speedup provided by mypy/c 0.990 on Python >= 3.7\n\n## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Changed\n- Build 
with static metadata using 'build' frontend\n- Make the language detection stricter\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n- Sphinx warnings when generating the documentation\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n\n### Changed\n- Build with static metadata using 'build' frontend\n- Make the language detection stricter\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n\n## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)\n\n### Added\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Removed\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n\n### Fixed\n- Sphinx warnings when generating the documentation\n\n## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)\n\n### Changed\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Removed\n- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)\n\n### Deprecated\n- Function `normalize` scheduled for removal in 3.0\n\n### Changed\n- Removed useless call to decode in fn is_unprintable (#206)\n\n### Fixed\n- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) 
(#204)\n\n## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)\n\n### Added\n- Output the Unicode table version when running the CLI with `--version` (PR #194)\n\n### Changed\n- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)\n- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)\n\n### Fixed\n- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)\n- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)\n\n### Removed\n- Support for Python 3.5 (PR #192)\n\n### Deprecated\n- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)\n\n## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)\n\n### Fixed\n- ASCII miss-detection on rare cases (PR #170)\n\n## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)\n\n### Added\n- Explicit support for Python 3.11 (PR #164)\n\n### Changed\n- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)\n\n## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)\n\n### Fixed\n- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)\n\n### Changed\n- Skipping the language-detection (CD) on ASCII (PR #155)\n\n## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)\n\n### Changed\n- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)\n\n### Fixed\n- Wrong logging level applied when setting kwarg `explain` to True (PR #146)\n\n## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)\n### Changed\n- Improvement over Vietnamese detection (PR #126)\n- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)\n- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)\n- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)\n- Code style as refactored by Sourcery-AI (PR #131)\n- Minor adjustment on the MD around european words (PR #133)\n- Remove and replace SRTs from assets / tests (PR #139)\n- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135)\n\n### Fixed\n- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)\n- Avoid using too insignificant chunk (PR #137)\n\n### Added\n- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)\n\n## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)\n### Added\n- Add support for Kazakh (Cyrillic) language detection (PR #109)\n\n### Changed\n- Further, improve inferring the 
language from a given single-byte code page (PR #112)\n- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)\n- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)\n- Various detection improvement (MD+CD) (PR #117)\n\n### Removed\n- Remove redundant logging entry about detected language(s) (PR #115)\n\n### Fixed\n- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)\n\n## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)\n### Fixed\n- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)\n- Fix CLI crash when using --minimal output in certain cases (PR #103)\n\n### Changed\n- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)\n\n## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)\n### Changed\n- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)\n- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)\n- The Unicode detection is slightly improved (PR #93)\n- Add syntax sugar \\_\\_bool\\_\\_ for results CharsetMatches list-container (PR #91)\n\n### Removed\n- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)\n\n### Fixed\n- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)\n- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)\n- The MANIFEST.in was not exhaustive (PR #78)\n\n## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)\n### Fixed\n- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)\n- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)\n- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)\n- Submatch factoring could be wrong in rare edge cases (PR #72)\n- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72)\n- Fix line endings from CRLF to LF for certain project files (PR #67)\n\n### Changed\n- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)\n- Allow fallback on specified encoding if any (PR #71)\n\n## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)\n### Changed\n- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)\n- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)\n\n## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)\n### Fixed\n- Empty/Too small JSON payload miss-detection fixed. 
Report from [@tseaver](https://github.com/tseaver) (PR #59)\n\n### Changed\n- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)\n\n## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)\n### Fixed\n- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)\n- Using explain=False permanently disable the verbose output in the current runtime (PR #47)\n- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47)\n- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)\n\n### Changed\n- Public function normalize default args values were not aligned with from_bytes (PR #53)\n\n### Added\n- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)\n\n## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)\n### Changed\n- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.\n- Accent has been made on UTF-8 detection, should perform rather instantaneous.\n- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.\n- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)\n- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+\n- utf_7 detection has been reinstated.\n\n### Removed\n- This package no longer require anything when used with Python 3.5 (Dropped cached_property)\n- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volap\u00fck, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.\n- The exception hook on UnicodeDecodeError has been removed.\n\n### Deprecated\n- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0\n\n### Fixed\n- The CLI output used the relative path of the file(s). Should be absolute.\n\n## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)\n### Fixed\n- Logger configuration/usage no longer conflict with others (PR #44)\n\n## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)\n### Removed\n- Using standard logging instead of using the package loguru.\n- Dropping nose test framework in favor of the maintained pytest.\n- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.\n- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.\n- Stop support for UTF-7 that does not contain a SIG.\n- Dropping PrettyTable, replaced with pure JSON output in CLI.\n\n### Fixed\n- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process.\n- Not searching properly for the BOM when trying utf32/16 parent codec.\n\n### Changed\n- Improving the package final size by compressing frequencies.json.\n- Huge improvement over the larges payload.\n\n### Added\n- CLI now produces JSON consumable output.\n- Return ASCII if given sequences fit. 
Given reasonable confidence.\n\n## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)\n\n### Fixed\n- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)\n\n## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)\n\n### Fixed\n- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)\n\n## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)\n\n### Fixed\n- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)\n\n## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)\n\n### Changed\n- Amend the previous release to allow prettytable 2.0 (PR #35)\n\n## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)\n\n### Fixed\n- Fix error while using the package with a python pre-release interpreter (PR #33)\n\n### Changed\n- Dependencies refactoring, constraints revised.\n\n### Added\n- Add python 3.9 and 3.10 to the supported interpreters\n\nMIT License\n\nCopyright (c) 2025 TAHRI Ahmed R.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.", - "release_date": "2025-05-02T08:34:06", + "description": "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.\n

Charset Detection, for Everyone \ud83d\udc4b
\n The Real First Universal Charset Detector
\n Featured Packages
\n In other language (unofficial port - by the community)

\n\n> A library that helps you read text from an unknown charset encoding.
Motivated by `chardet`,\n> I'm trying to resolve the issue by taking a new approach.\n> All IANA character set names for which the Python core library provides codecs are supported.\n\n

\n >>>>> \ud83d\udc49 Try Me Online Now, Then Adopt Me \ud83d\udc48 <<<<<\n

\n\nThis project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.\n\n| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |\n|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|\n| `Fast` | \u274c | \u2705 | \u2705 |\n| `Universal**` | \u274c | \u2705 | \u274c |\n| `Reliable` **without** distinguishable standards | \u274c | \u2705 | \u2705 |\n| `Reliable` **with** distinguishable standards | \u2705 | \u2705 | \u2705 |\n| `License` | LGPL-2.1
_restrictive_ | MIT | MPL-1.1
_restrictive_ |\n| `Native Python` | \u2705 | \u2705 | \u274c |\n| `Detect spoken language` | \u274c | \u2705 | N/A |\n| `UnicodeDecodeError Safety` | \u274c | \u2705 | \u274c |\n| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |\n| `Supported Encoding` | 33 | \ud83c\udf89 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |\n\n

\n\"Reading\"Cat\n

\n\n*\\*\\* : They are clearly using specific code for a specific encoding even if covering most of used one*
\n\n## \u26a1 Performance\n\nThis package offer better performance than its counterpart Chardet. Here are some numbers.\n\n| Package | Accuracy | Mean per file (ms) | File per sec (est) |\n|-----------------------------------------------|:--------:|:------------------:|:------------------:|\n| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |\n| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |\n\n| Package | 99th percentile | 95th percentile | 50th percentile |\n|-----------------------------------------------|:---------------:|:---------------:|:---------------:|\n| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |\n| charset-normalizer | 100 ms | 50 ms | 5 ms |\n\n_updated as of december 2024 using CPython 3.12_\n\nChardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload.\n\n> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.\n> And yes, these results might change at any time. The dataset can be updated to include more files.\n> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.\n> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability\n> (e.g. Supported Encoding) Challenge-them if you want.\n\n## \u2728 Installation\n\nUsing pip:\n\n```sh\npip install charset-normalizer -U\n```\n\n## \ud83d\ude80 Basic Usage\n\n### CLI\nThis package comes with a CLI.\n\n```\nusage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]\n file [file ...]\n\nThe Real First Universal Charset Detector. Discover originating encoding used\non text file. Normalize text to unicode.\n\npositional arguments:\n files File(s) to be analysed\n\noptional arguments:\n -h, --help show this help message and exit\n -v, --verbose Display complementary information about file if any.\n Stdout will contain logs about the detection process.\n -a, --with-alternative\n Output complementary possibilities if any. Top-level\n JSON WILL be a list.\n -n, --normalize Permit to normalize input file. If not set, program\n does not write anything.\n -m, --minimal Only output the charset detected to STDOUT. Disabling\n JSON output.\n -r, --replace Replace file when trying to normalize it instead of\n creating a new one.\n -f, --force Replace file without asking if you are sure, use this\n flag with caution.\n -t THRESHOLD, --threshold THRESHOLD\n Define a custom maximum amount of chaos allowed in\n decoded content. 0. 
<= chaos <= 1.\n --version Show version information and exit.\n```\n\n```bash\nnormalizer ./data/sample.1.fr.srt\n```\n\nor\n\n```bash\npython -m charset_normalizer ./data/sample.1.fr.srt\n```\n\n\ud83c\udf89 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format.\n\n```json\n{\n \"path\": \"/home/default/projects/charset_normalizer/data/sample.1.fr.srt\",\n \"encoding\": \"cp1252\",\n \"encoding_aliases\": [\n \"1252\",\n \"windows_1252\"\n ],\n \"alternative_encodings\": [\n \"cp1254\",\n \"cp1256\",\n \"cp1258\",\n \"iso8859_14\",\n \"iso8859_15\",\n \"iso8859_16\",\n \"iso8859_3\",\n \"iso8859_9\",\n \"latin_1\",\n \"mbcs\"\n ],\n \"language\": \"French\",\n \"alphabets\": [\n \"Basic Latin\",\n \"Latin-1 Supplement\"\n ],\n \"has_sig_or_bom\": false,\n \"chaos\": 0.149,\n \"coherence\": 97.152,\n \"unicode_path\": null,\n \"is_preferred\": true\n}\n```\n\n### Python\n*Just print out normalized text*\n```python\nfrom charset_normalizer import from_path\n\nresults = from_path('./my_subtitle.srt')\n\nprint(str(results.best()))\n```\n\n*Upgrade your code without effort*\n```python\nfrom charset_normalizer import detect\n```\n\nThe above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.\n\nSee the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)\n\n## \ud83d\ude07 Why\n\nWhen I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a\nreliable alternative using a completely different method. Also! I never back down on a good challenge!\n\nI **don't care** about the **originating charset** encoding, because **two different tables** can\nproduce **two identical rendered string.**\nWhat I want is to get readable text, the best I can.\n\nIn a way, **I'm brute forcing text decoding.** How cool is that ? \ud83d\ude0e\n\nDon't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.\n\n## \ud83c\udf70 How\n\n - Discard all charset encoding table that could not fit the binary content.\n - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.\n - Extract matches with the lowest mess detected.\n - Additionally, we measure coherence / probe for a language.\n\n**Wait a minute**, what is noise/mess and coherence according to **YOU ?**\n\n*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then\n**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).\n I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to\n improve or rewrite it.\n\n*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought\nthat intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.\n\n## \u26a1 Known limitations\n\n - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))\n - Every charset detector heavily depends on sufficient content. 
In common cases, do not bother run detection on very tiny content.\n\n## \u26a0\ufe0f About Python EOLs\n\n**If you are running:**\n\n- Python >=2.7,<3.5: Unsupported\n- Python 3.5: charset-normalizer < 2.1\n- Python 3.6: charset-normalizer < 3.1\n- Python 3.7: charset-normalizer < 4.0\n\nUpgrade your Python interpreter as soon as possible.\n\n## \ud83d\udc64 Contributing\n\nContributions, issues and feature requests are very much welcome.
\nFeel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.\n\n## \ud83d\udcdd License\n\nCopyright \u00a9 [Ahmed TAHRI @Ousret](https://github.com/Ousret).
\nThis project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.\n\nCharacters frequencies used in this project \u00a9 2012 [Denny Vrande\u010di\u0107](http://simia.net/letters/)\n\n## \ud83d\udcbc For Enterprise\n\nProfessional support for charset-normalizer is available as part of the [Tidelift\nSubscription][1]. Tidelift gives software development teams a single source for\npurchasing and maintaining their software, with professional grade assurances\nfrom the experts who know it best, while seamlessly integrating with existing\ntools.\n\n[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme\n\n[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297)\n\n# Changelog\nAll notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\n\n## [3.4.3](https://github.com/Ousret/charset_normalizer/compare/3.4.2...3.4.3) (2025-08-09)\n\n### Changed\n- mypy(c) is no longer a required dependency at build time if `CHARSET_NORMALIZER_USE_MYPYC` isn't set to `1`. (#595) (#583)\n- automatically lower confidence on small bytes samples that are not Unicode in `detect` output legacy function. (#391)\n\n### Added\n- Custom build backend to overcome inability to mark mypy as an optional dependency in the build phase.\n- Support for Python 3.14\n\n### Fixed\n- sdist archive contained useless directories.\n- automatically fallback on valid UTF-16 or UTF-32 even if the md says it's noisy. (#633)\n\n### Misc\n- SBOM are automatically published to the relevant GitHub release to comply with regulatory changes.\n Each published wheel comes with its SBOM. We choose CycloneDX as the format.\n- Prebuilt optimized wheel are no longer distributed by default for CPython 3.7 due to a change in cibuildwheel.\n\n## [3.4.2](https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2) (2025-05-02)\n\n### Fixed\n- Addressed the DeprecationWarning in our CLI regarding `argparse.FileType` by backporting the target class into the package. (#591)\n- Improved the overall reliability of the detector with CJK Ideographs. (#605) (#587)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8\n\n## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24)\n\n### Changed\n- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend.\n- Enforce annotation delayed loading for a simpler and consistent types in the project.\n- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8\n\n### Added\n- pre-commit configuration.\n- noxfile.\n\n### Removed\n- `build-requirements.txt` as per using `pyproject.toml` native build configuration.\n- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile).\n- `setup.cfg` in favor of `pyproject.toml` metadata configuration.\n- Unused `utils.range_scan` function.\n\n### Fixed\n- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. 
(#572)\n- Deprecation warning \"'count' is passed as positional argument\" when converting to Unicode bytes on Python 3.13+\n\n## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)\n\n### Added\n- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints.\n- Support for Python 3.13 (#512)\n\n### Fixed\n- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.\n- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537)\n- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. (#381)\n\n## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)\n\n### Fixed\n- Unintentional memory usage regression when using large payload that match several encoding (#376)\n- Regression on some detection case showcased in the documentation (#371)\n\n### Added\n- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)\n\n## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)\n\n### Changed\n- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8\n- Improved the general detection reliability based on reports from the community\n\n## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)\n\n### Added\n- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`\n- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)\n\n### Removed\n- (internal) Redundant utils.is_ascii function and unused function is_private_use_only\n- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant\n\n### Changed\n- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection\n- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8\n\n### Fixed\n- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \\_\\_lt\\_\\_ (#350)\n\n## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)\n\n### Changed\n- Typehint for function `from_path` no longer enforce `PathLike` as its first argument\n- Minor improvement over the global detection reliability\n\n### Added\n- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries\n- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)\n- Explicit support for Python 3.12\n\n### Fixed\n- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)\n\n## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)\n\n### Added\n- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)\n\n### Removed\n- Support for Python 3.6 (PR #260)\n\n### Changed\n- Optional speedup provided by mypy/c 1.0.1\n\n## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)\n\n### Fixed\n- Multi-bytes cutter/chunk generator did not always cut 
correctly (PR #233)\n\n### Changed\n- Speedup provided by mypy/c 0.990 on Python >= 3.7\n\n## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Changed\n- Build with static metadata using 'build' frontend\n- Make the language detection stricter\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n- Sphinx warnings when generating the documentation\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)\n\n### Added\n- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results\n- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES\n- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio\n\n### Changed\n- Build with static metadata using 'build' frontend\n- Make the language detection stricter\n\n### Fixed\n- CLI with opt --normalize fail when using full path for files\n- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it\n\n### Removed\n- Coherence detector no longer return 'Simple English' instead return 'English'\n- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'\n\n## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)\n\n### Added\n- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)\n\n### Removed\n- Breaking: Method `first()` and `best()` from CharsetMatch\n- UTF-7 will no longer appear as \"detected\" without a recognized SIG/mark (is unreliable/conflict with ASCII)\n\n### Fixed\n- Sphinx warnings when generating the documentation\n\n## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)\n\n### Changed\n- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1\n\n### Removed\n- Breaking: Class aliases 
CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches\n- Breaking: Top-level function `normalize`\n- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch\n- Support for the backport `unicodedata2`\n\n## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)\n\n### Deprecated\n- Function `normalize` scheduled for removal in 3.0\n\n### Changed\n- Removed useless call to decode in fn is_unprintable (#206)\n\n### Fixed\n- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204)\n\n## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)\n\n### Added\n- Output the Unicode table version when running the CLI with `--version` (PR #194)\n\n### Changed\n- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)\n- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)\n\n### Fixed\n- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)\n- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)\n\n### Removed\n- Support for Python 3.5 (PR #192)\n\n### Deprecated\n- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)\n\n## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)\n\n### Fixed\n- ASCII miss-detection on rare cases (PR #170)\n\n## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)\n\n### Added\n- Explicit support for Python 3.11 (PR #164)\n\n### Changed\n- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)\n\n## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)\n\n### Fixed\n- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)\n\n### Changed\n- Skipping the language-detection (CD) on ASCII (PR #155)\n\n## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)\n\n### Changed\n- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)\n\n### Fixed\n- Wrong logging level applied when setting kwarg `explain` to True (PR #146)\n\n## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)\n### Changed\n- Improvement over Vietnamese detection (PR #126)\n- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)\n- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)\n- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)\n- Code style as refactored by Sourcery-AI (PR #131)\n- Minor adjustment on the MD around european words (PR #133)\n- Remove and replace SRTs from assets / tests (PR #139)\n- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Setting kwarg `explain` to True will add provisionally 
(bounded to function lifespan) a specific stream handler (PR #135)\n\n### Fixed\n- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)\n- Avoid using too insignificant chunk (PR #137)\n\n### Added\n- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)\n- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)\n\n## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)\n### Added\n- Add support for Kazakh (Cyrillic) language detection (PR #109)\n\n### Changed\n- Further, improve inferring the language from a given single-byte code page (PR #112)\n- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)\n- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)\n- Various detection improvement (MD+CD) (PR #117)\n\n### Removed\n- Remove redundant logging entry about detected language(s) (PR #115)\n\n### Fixed\n- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)\n\n## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)\n### Fixed\n- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)\n- Fix CLI crash when using --minimal output in certain cases (PR #103)\n\n### Changed\n- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)\n\n## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)\n### Changed\n- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)\n- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)\n- The Unicode detection is slightly improved (PR #93)\n- Add syntax sugar \\_\\_bool\\_\\_ for results CharsetMatches list-container (PR #91)\n\n### Removed\n- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)\n\n### Fixed\n- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)\n- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)\n- The MANIFEST.in was not exhaustive (PR #78)\n\n## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)\n### Fixed\n- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)\n- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)\n- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)\n- Submatch factoring could be wrong in rare edge cases (PR #72)\n- Multiple files given to the CLI were ignored when publishing results to STDOUT. 
(After the first path) (PR #72)\n- Fix line endings from CRLF to LF for certain project files (PR #67)\n\n### Changed\n- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)\n- Allow fallback on specified encoding if any (PR #71)\n\n## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)\n### Changed\n- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)\n- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)\n\n## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)\n### Fixed\n- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)\n\n### Changed\n- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)\n\n## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)\n### Fixed\n- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)\n- Using explain=False permanently disable the verbose output in the current runtime (PR #47)\n- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47)\n- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)\n\n### Changed\n- Public function normalize default args values were not aligned with from_bytes (PR #53)\n\n### Added\n- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)\n\n## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)\n### Changed\n- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.\n- Accent has been made on UTF-8 detection, should perform rather instantaneous.\n- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.\n- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)\n- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+\n- utf_7 detection has been reinstated.\n\n### Removed\n- This package no longer require anything when used with Python 3.5 (Dropped cached_property)\n- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volap\u00fck, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.\n- The exception hook on UnicodeDecodeError has been removed.\n\n### Deprecated\n- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0\n\n### Fixed\n- The CLI output used the relative path of the file(s). 
Should be absolute.\n\n## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)\n### Fixed\n- Logger configuration/usage no longer conflict with others (PR #44)\n\n## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)\n### Removed\n- Using standard logging instead of using the package loguru.\n- Dropping nose test framework in favor of the maintained pytest.\n- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.\n- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.\n- Stop support for UTF-7 that does not contain a SIG.\n- Dropping PrettyTable, replaced with pure JSON output in CLI.\n\n### Fixed\n- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process.\n- Not searching properly for the BOM when trying utf32/16 parent codec.\n\n### Changed\n- Improving the package final size by compressing frequencies.json.\n- Huge improvement over the larges payload.\n\n### Added\n- CLI now produces JSON consumable output.\n- Return ASCII if given sequences fit. Given reasonable confidence.\n\n## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)\n\n### Fixed\n- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)\n\n## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)\n\n### Fixed\n- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)\n\n## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)\n\n### Fixed\n- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)\n\n## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)\n\n### Changed\n- Amend the previous release to allow prettytable 2.0 (PR #35)\n\n## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)\n\n### Fixed\n- Fix error while using the package with a python pre-release interpreter (PR #33)\n\n### Changed\n- Dependencies refactoring, constraints revised.\n\n### Added\n- Add python 3.9 and 3.10 to the supported interpreters\n\nMIT License\n\nCopyright (c) 2025 TAHRI Ahmed R.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.", + "release_date": "2025-08-09T07:57:26", "parties": [ { "type": "person", @@ -481,6 +481,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -491,11 +492,11 @@ "Typing :: Typed" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/ac/c6/80b93fabc626b75b1665ffe405e28c3cef0aae9237c5c05f15955af4edd8/charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", - "size": 148007, + "download_url": "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", + "size": 53175, "sha1": null, - "md5": "190b711b0064216e7f5c1119dcabd5aa", - "sha256": "dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681", + "md5": "4a43811bb5747201dc3694e76763e446", + "sha256": "ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/jawah/charset_normalizer", @@ -503,10 +504,7 @@ "copyright": null, "license_expression": null, "declared_license": { - "license": "MIT", - "classifiers": [ - "License :: OSI Approved :: MIT License" - ] + "license": "MIT" }, "notice_text": null, "source_packages": [], @@ -515,9 +513,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/charset-normalizer/3.4.2/json", + "api_data_url": "https://pypi.org/pypi/charset-normalizer/3.4.3/json", "datasource_id": null, - "purl": "pkg:pypi/charset-normalizer@3.4.2" + "purl": "pkg:pypi/charset-normalizer@3.4.3" }, { "type": "pypi", @@ -577,17 +575,17 @@ "type": "pypi", "namespace": null, "name": "cryptography", - "version": "45.0.5", + "version": "46.0.0", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "cryptography is a package which provides cryptographic recipes and primitives to Python developers.\npyca/cryptography\n=================\n\n.. image:: https://img.shields.io/pypi/v/cryptography.svg\n :target: https://pypi.org/project/cryptography/\n :alt: Latest Version\n\n.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest\n :target: https://cryptography.io\n :alt: Latest Docs\n\n.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main\n :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain\n\n\n``cryptography`` is a package which provides cryptographic recipes and\nprimitives to Python developers. Our goal is for it to be your \"cryptographic\nstandard library\". It supports Python 3.7+ and PyPy3 7.3.11+.\n\n``cryptography`` includes both high level recipes and low level interfaces to\ncommon cryptographic algorithms such as symmetric ciphers, message digests, and\nkey derivation functions. For example, to encrypt something with\n``cryptography``'s high level symmetric encryption recipe:\n\n.. 
code-block:: pycon\n\n >>> from cryptography.fernet import Fernet\n >>> # Put this somewhere safe!\n >>> key = Fernet.generate_key()\n >>> f = Fernet(key)\n >>> token = f.encrypt(b\"A really secret message. Not for prying eyes.\")\n >>> token\n b'...'\n >>> f.decrypt(token)\n b'A really secret message. Not for prying eyes.'\n\nYou can find more information in the `documentation`_.\n\nYou can install ``cryptography`` with:\n\n.. code-block:: console\n\n $ pip install cryptography\n\nFor full details see `the installation documentation`_.\n\nDiscussion\n~~~~~~~~~~\n\nIf you run into bugs, you can file them in our `issue tracker`_.\n\nWe maintain a `cryptography-dev`_ mailing list for development discussion.\n\nYou can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get\ninvolved.\n\nSecurity\n~~~~~~~~\n\nNeed to report a security issue? Please consult our `security reporting`_\ndocumentation.\n\n\n.. _`documentation`: https://cryptography.io/\n.. _`the installation documentation`: https://cryptography.io/en/latest/installation/\n.. _`issue tracker`: https://github.com/pyca/cryptography/issues\n.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev\n.. _`security reporting`: https://cryptography.io/en/latest/security/", - "release_date": "2025-07-02T13:05:46", + "description": "cryptography is a package which provides cryptographic recipes and primitives to Python developers.\npyca/cryptography\n=================\n\n.. image:: https://img.shields.io/pypi/v/cryptography.svg\n :target: https://pypi.org/project/cryptography/\n :alt: Latest Version\n\n.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest\n :target: https://cryptography.io\n :alt: Latest Docs\n\n.. image:: https://github.com/pyca/cryptography/actions/workflows/ci.yml/badge.svg\n :target: https://github.com/pyca/cryptography/actions/workflows/ci.yml?query=branch%3Amain\n\n``cryptography`` is a package which provides cryptographic recipes and\nprimitives to Python developers. Our goal is for it to be your \"cryptographic\nstandard library\". It supports Python 3.8+ and PyPy3 7.3.11+.\n\n``cryptography`` includes both high level recipes and low level interfaces to\ncommon cryptographic algorithms such as symmetric ciphers, message digests, and\nkey derivation functions. For example, to encrypt something with\n``cryptography``'s high level symmetric encryption recipe:\n\n.. code-block:: pycon\n\n >>> from cryptography.fernet import Fernet\n >>> # Put this somewhere safe!\n >>> key = Fernet.generate_key()\n >>> f = Fernet(key)\n >>> token = f.encrypt(b\"A really secret message. Not for prying eyes.\")\n >>> token\n b'...'\n >>> f.decrypt(token)\n b'A really secret message. Not for prying eyes.'\n\nYou can find more information in the `documentation`_.\n\nYou can install ``cryptography`` with:\n\n.. code-block:: console\n\n $ pip install cryptography\n\nFor full details see `the installation documentation`_.\n\nDiscussion\n~~~~~~~~~~\n\nIf you run into bugs, you can file them in our `issue tracker`_.\n\nWe maintain a `cryptography-dev`_ mailing list for development discussion.\n\nYou can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get\ninvolved.\n\nSecurity\n~~~~~~~~\n\nNeed to report a security issue? Please consult our `security reporting`_\ndocumentation.\n\n\n.. _`documentation`: https://cryptography.io/\n.. _`the installation documentation`: https://cryptography.io/en/latest/installation/\n.. 
_`issue tracker`: https://github.com/pyca/cryptography/issues\n.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev\n.. _`security reporting`: https://cryptography.io/en/latest/security/", + "release_date": "2025-09-16T21:07:03", "parties": [ { "type": "person", "role": "author", - "name": "The cryptography developers ", + "name": null, "email": "The Python Cryptographic Authority and individual contributors ", "url": null } @@ -608,28 +606,27 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: Free Threading :: 3 - Stable", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Security :: Cryptography" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/f2/2e/e2dbd629481b499b14516eed933f3276eb3239f7cee2dcfa4ee6b44d4711/cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", - "size": 4554189, + "download_url": "https://files.pythonhosted.org/packages/da/94/f1c1f30110c05fa5247bf460b17acfd52fa3f5c77e94ba19cff8957dc5e6/cryptography-46.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", + "size": 4562561, "sha1": null, - "md5": "e60dd7bf09e038a4508efcef2fc28cd5", - "sha256": "7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a", + "md5": "4fe77bef21236be92883b98ea2b89580", + "sha256": "c3cd09b1490c1509bf3892bde9cef729795fae4a2fee0621f19be3321beca7e4", "sha512": null, "bug_tracking_url": null, "code_view_url": null, "vcs_url": null, "copyright": null, - "license_expression": null, - "declared_license": { - "license": "Apache-2.0 OR BSD-3-Clause" - }, + "license_expression": "Apache-2.0 OR BSD-3-Clause", + "declared_license": {}, "notice_text": null, "source_packages": [], "file_references": [], @@ -637,9 +634,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/cryptography/45.0.5/json", + "api_data_url": "https://pypi.org/pypi/cryptography/46.0.0/json", "datasource_id": null, - "purl": "pkg:pypi/cryptography@45.0.5" + "purl": "pkg:pypi/cryptography@46.0.0" }, { "type": "pypi", @@ -913,12 +910,12 @@ "type": "pypi", "namespace": null, "name": "pycparser", - "version": "2.22", + "version": "2.23", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "C parser in Python\npycparser is a complete parser of the C language, written in\npure Python using the PLY parsing library.\nIt parses C code into an AST and can serve as a front-end for\nC compilers or analysis tools.", - "release_date": "2024-03-30T13:22:20", + "release_date": "2025-09-09T13:23:46", "parties": [ { "type": "person", @@ -941,15 +938,16 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9" ], "homepage_url": "https://github.com/eliben/pycparser", - "download_url": "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", - "size": 117552, + "download_url": 
"https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", + "size": 118140, "sha1": null, - "md5": "e9bf4a92f270e6482393bd716406ff85", - "sha256": "c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", + "md5": "961daf0e0910747590f8a0101322bcd3", + "sha256": "e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", "sha512": null, "bug_tracking_url": null, "code_view_url": null, @@ -969,9 +967,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/pycparser/2.22/json", + "api_data_url": "https://pypi.org/pypi/pycparser/2.23/json", "datasource_id": null, - "purl": "pkg:pypi/pycparser@2.22" + "purl": "pkg:pypi/pycparser@2.23" }, { "type": "pypi", @@ -1330,26 +1328,26 @@ ] }, { - "package": "pkg:pypi/azure-storage-blob@12.25.1", + "package": "pkg:pypi/azure-storage-blob@12.26.0", "dependencies": [ "pkg:pypi/azure-core@1.33.0", - "pkg:pypi/cryptography@45.0.5", + "pkg:pypi/cryptography@46.0.0", "pkg:pypi/isodate@0.7.2", "pkg:pypi/typing-extensions@4.13.2" ] }, { - "package": "pkg:pypi/certifi@2025.7.14", + "package": "pkg:pypi/certifi@2025.8.3", "dependencies": [] }, { "package": "pkg:pypi/cffi@1.17.1", "dependencies": [ - "pkg:pypi/pycparser@2.22" + "pkg:pypi/pycparser@2.23" ] }, { - "package": "pkg:pypi/charset-normalizer@3.4.2", + "package": "pkg:pypi/charset-normalizer@3.4.3", "dependencies": [] }, { @@ -1357,9 +1355,10 @@ "dependencies": [] }, { - "package": "pkg:pypi/cryptography@45.0.5", + "package": "pkg:pypi/cryptography@46.0.0", "dependencies": [ - "pkg:pypi/cffi@1.17.1" + "pkg:pypi/cffi@1.17.1", + "pkg:pypi/typing-extensions@4.13.2" ] }, { @@ -1374,7 +1373,7 @@ "package": "pkg:pypi/msrest@0.7.1", "dependencies": [ "pkg:pypi/azure-core@1.33.0", - "pkg:pypi/certifi@2025.7.14", + "pkg:pypi/certifi@2025.8.3", "pkg:pypi/isodate@0.7.2", "pkg:pypi/requests-oauthlib@2.0.0", "pkg:pypi/requests@2.32.4" @@ -1385,7 +1384,7 @@ "dependencies": [] }, { - "package": "pkg:pypi/pycparser@2.22", + "package": "pkg:pypi/pycparser@2.23", "dependencies": [] }, { @@ -1398,8 +1397,8 @@ { "package": "pkg:pypi/requests@2.32.4", "dependencies": [ - "pkg:pypi/certifi@2025.7.14", - "pkg:pypi/charset-normalizer@3.4.2", + "pkg:pypi/certifi@2025.8.3", + "pkg:pypi/charset-normalizer@3.4.3", "pkg:pypi/idna@3.10", "pkg:pypi/urllib3@2.2.3" ] diff --git a/tests/data/example-requirements-ignore-errors-expected.json b/tests/data/example-requirements-ignore-errors-expected.json index e75bd280..ff451aea 100644 --- a/tests/data/example-requirements-ignore-errors-expected.json +++ b/tests/data/example-requirements-ignore-errors-expected.json @@ -430,12 +430,12 @@ "type": "pypi", "namespace": null, "name": "pytest", - "version": "8.4.1", + "version": "8.4.2", "qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "pytest: simple powerful testing with Python\n.. image:: https://github.com/pytest-dev/pytest/raw/main/doc/en/img/pytest_logo_curves.svg\n :target: https://docs.pytest.org/en/stable/\n :align: center\n :height: 200\n :alt: pytest\n\n\n------\n\n.. image:: https://img.shields.io/pypi/v/pytest.svg\n :target: https://pypi.org/project/pytest/\n\n.. image:: https://img.shields.io/conda/vn/conda-forge/pytest.svg\n :target: https://anaconda.org/conda-forge/pytest\n\n.. image:: https://img.shields.io/pypi/pyversions/pytest.svg\n :target: https://pypi.org/project/pytest/\n\n.. 
image:: https://codecov.io/gh/pytest-dev/pytest/branch/main/graph/badge.svg\n :target: https://codecov.io/gh/pytest-dev/pytest\n :alt: Code coverage Status\n\n.. image:: https://github.com/pytest-dev/pytest/actions/workflows/test.yml/badge.svg\n :target: https://github.com/pytest-dev/pytest/actions?query=workflow%3Atest\n\n.. image:: https://results.pre-commit.ci/badge/github/pytest-dev/pytest/main.svg\n :target: https://results.pre-commit.ci/latest/github/pytest-dev/pytest/main\n :alt: pre-commit.ci status\n\n.. image:: https://www.codetriage.com/pytest-dev/pytest/badges/users.svg\n :target: https://www.codetriage.com/pytest-dev/pytest\n\n.. image:: https://readthedocs.org/projects/pytest/badge/?version=latest\n :target: https://pytest.readthedocs.io/en/latest/?badge=latest\n :alt: Documentation Status\n\n.. image:: https://img.shields.io/badge/Discord-pytest--dev-blue\n :target: https://discord.com/invite/pytest-dev\n :alt: Discord\n\n.. image:: https://img.shields.io/badge/Libera%20chat-%23pytest-orange\n :target: https://web.libera.chat/#pytest\n :alt: Libera chat\n\n\nThe ``pytest`` framework makes it easy to write small tests, yet\nscales to support complex functional testing for applications and libraries.\n\nAn example of a simple test:\n\n.. code-block:: python\n\n # content of test_sample.py\n def inc(x):\n return x + 1\n\n\n def test_answer():\n assert inc(3) == 5\n\n\nTo execute it::\n\n $ pytest\n ============================= test session starts =============================\n collected 1 items\n\n test_sample.py F\n\n ================================== FAILURES ===================================\n _________________________________ test_answer _________________________________\n\n def test_answer():\n > assert inc(3) == 5\n E assert 4 == 5\n E + where 4 = inc(3)\n\n test_sample.py:5: AssertionError\n ========================== 1 failed in 0.04 seconds ===========================\n\n\nDue to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started `_ for more examples.\n\n\nFeatures\n--------\n\n- Detailed info on failing `assert statements `_ (no need to remember ``self.assert*`` names)\n\n- `Auto-discovery\n `_\n of test modules and functions\n\n- `Modular fixtures `_ for\n managing small or parametrized long-lived test resources\n\n- Can run `unittest `_ (or trial)\n test suites out of the box\n\n- Python 3.9+ or PyPy3\n\n- Rich plugin architecture, with over 1300+ `external plugins `_ and thriving community\n\n\nDocumentation\n-------------\n\nFor full documentation, including installation, tutorials and PDF documents, please see https://docs.pytest.org/en/stable/.\n\n\nBugs/Requests\n-------------\n\nPlease use the `GitHub issue tracker `_ to submit bugs or request features.\n\n\nChangelog\n---------\n\nConsult the `Changelog `__ page for fixes and enhancements of each version.\n\n\nSupport pytest\n--------------\n\n`Open Collective`_ is an online funding platform for open and transparent communities.\nIt provides tools to raise money and share your finances in full transparency.\n\nIt is the platform of choice for individuals and companies that want to make one-time or\nmonthly donations directly to the project.\n\nSee more details in the `pytest collective`_.\n\n.. _Open Collective: https://opencollective.com\n.. 
_pytest collective: https://opencollective.com/pytest\n\n\npytest for enterprise\n---------------------\n\nAvailable as part of the Tidelift Subscription.\n\nThe maintainers of pytest and thousands of other packages are working with Tidelift to deliver commercial support and\nmaintenance for the open source dependencies you use to build your applications.\nSave time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use.\n\n`Learn more. `_\n\nSecurity\n^^^^^^^^\n\npytest has never been associated with a security vulnerability, but in any case, to report a\nsecurity vulnerability please use the `Tidelift security contact `_.\nTidelift will coordinate the fix and disclosure.\n\n\nLicense\n-------\n\nCopyright Holger Krekel and others, 2004.\n\nDistributed under the terms of the `MIT`_ license, pytest is free and open source software.\n\n.. _`MIT`: https://github.com/pytest-dev/pytest/blob/main/LICENSE", - "release_date": "2025-06-18T05:48:03", + "release_date": "2025-09-04T14:34:20", "parties": [ { "type": "person", @@ -466,11 +466,11 @@ "Topic :: Utilities" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", - "size": 365474, + "download_url": "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", + "size": 365750, "sha1": null, - "md5": "6ad4ee79caee224776d07f155d91b7e7", - "sha256": "539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", + "md5": "f2cf4b89fbd6145531a1606f96a89503", + "sha256": "872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", "sha512": null, "bug_tracking_url": "https://github.com/pytest-dev/pytest/issues", "code_view_url": "https://github.com/pytest-dev/pytest", @@ -490,9 +490,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/pytest/8.4.1/json", + "api_data_url": "https://pypi.org/pypi/pytest/8.4.2/json", "datasource_id": null, - "purl": "pkg:pypi/pytest@8.4.1" + "purl": "pkg:pypi/pytest@8.4.2" }, { "type": "pypi", @@ -557,12 +557,12 @@ "type": "pypi", "namespace": null, "name": "typing-extensions", - "version": "4.14.1", + "version": "4.15.0", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Backported and Experimental Type Hints for Python 3.9+\n# Typing Extensions\n\n[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)\n\n[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) \u2013\n[PyPI](https://pypi.org/project/typing-extensions/)\n\n## Overview\n\nThe `typing_extensions` module serves two related purposes:\n\n- Enable use of new type system features on older Python versions. For example,\n `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows\n users on previous Python versions to use it too.\n- Enable experimentation with new type system PEPs before they are accepted and\n added to the `typing` module.\n\n`typing_extensions` is treated specially by static type checkers such as\nmypy and pyright. Objects defined in `typing_extensions` are treated the same\nway as equivalent forms in `typing`.\n\n`typing_extensions` uses\n[Semantic Versioning](https://semver.org/). 
The\nmajor version will be incremented only for backwards-incompatible changes.\nTherefore, it's safe to depend\non `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`,\nwhere `x.y` is the first version that includes all features you need.\n\n## Included items\n\nSee [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a\ncomplete listing of module contents.\n\n## Contributing\n\nSee [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)\nfor how to contribute to `typing_extensions`.", - "release_date": "2025-07-04T13:28:32", + "description": "Backported and Experimental Type Hints for Python 3.9+\n# Typing Extensions\n\n[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)\n\n[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) \u2013\n[PyPI](https://pypi.org/project/typing-extensions/)\n\n## Overview\n\nThe `typing_extensions` module serves two related purposes:\n\n- Enable use of new type system features on older Python versions. For example,\n `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows\n users on previous Python versions to use it too.\n- Enable experimentation with new type system PEPs before they are accepted and\n added to the `typing` module.\n\n`typing_extensions` is treated specially by static type checkers such as\nmypy and pyright. Objects defined in `typing_extensions` are treated the same\nway as equivalent forms in `typing`.\n\n`typing_extensions` uses\n[Semantic Versioning](https://semver.org/). The\nmajor version will be incremented only for backwards-incompatible changes.\nTherefore, it's safe to depend\non `typing_extensions` like this: `typing_extensions ~=x.y`,\nwhere `x.y` is the first version that includes all features you need.\n[This](https://packaging.python.org/en/latest/specifications/version-specifiers/#compatible-release)\nis equivalent to `typing_extensions >=x.y, <(x+1)`. 
Do not depend on `~= x.y.z`\nunless you really know what you're doing; that defeats the purpose of\nsemantic versioning.\n\n## Included items\n\nSee [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a\ncomplete listing of module contents.\n\n## Contributing\n\nSee [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)\nfor how to contribute to `typing_extensions`.", + "release_date": "2025-08-25T13:49:24", "parties": [ { "type": "person", @@ -600,11 +600,11 @@ "Topic :: Software Development" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", - "size": 43906, + "download_url": "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", + "size": 44614, "sha1": null, - "md5": "86905389dfed18c11e510c9e23147fcb", - "sha256": "d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", + "md5": "1394f56d85d87540f7907680572797e1", + "sha256": "f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", "sha512": null, "bug_tracking_url": "https://github.com/python/typing_extensions/issues", "code_view_url": null, @@ -619,16 +619,16 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/typing-extensions/4.14.1/json", + "api_data_url": "https://pypi.org/pypi/typing-extensions/4.15.0/json", "datasource_id": null, - "purl": "pkg:pypi/typing-extensions@4.14.1" + "purl": "pkg:pypi/typing-extensions@4.15.0" } ], "resolved_dependencies_graph": [ { "package": "pkg:pypi/exceptiongroup@1.3.0", "dependencies": [ - "pkg:pypi/typing-extensions@4.14.1" + "pkg:pypi/typing-extensions@4.15.0" ] }, { @@ -652,7 +652,7 @@ "dependencies": [] }, { - "package": "pkg:pypi/pytest@8.4.1", + "package": "pkg:pypi/pytest@8.4.2", "dependencies": [ "pkg:pypi/exceptiongroup@1.3.0", "pkg:pypi/iniconfig@2.1.0", @@ -667,7 +667,7 @@ "dependencies": [] }, { - "package": "pkg:pypi/typing-extensions@4.14.1", + "package": "pkg:pypi/typing-extensions@4.15.0", "dependencies": [] } ] diff --git a/tests/data/hash-requirements.txt b/tests/data/hash-requirements.txt new file mode 100644 index 00000000..6bba4d87 --- /dev/null +++ b/tests/data/hash-requirements.txt @@ -0,0 +1,7 @@ +addict==2.4.0 \ + --hash=sha256:249bb56bbfd3cdc2a004ea0ff4c2b6ddc84d53bc2194761636eb314d5cfa5dfc \ + --hash=sha256:b3b2210e0e067a281f5646c8c5db92e99b7231ea8b0eb5f74dbdf9e259d4e494 + +requests==2.25.1 \ + --hash=sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e \ + --hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804 diff --git a/tests/data/hash-requirements.txt-expected.json b/tests/data/hash-requirements.txt-expected.json new file mode 100644 index 00000000..7f856ab4 --- /dev/null +++ b/tests/data/hash-requirements.txt-expected.json @@ -0,0 +1,541 @@ +{ + "headers": { + "tool_name": "python-inspector", + "tool_homepageurl": "https://github.com/aboutcode-org/python-inspector", + "options": [ + "--index-url https://pypi.org/simple", + "--json ", + "--operating-system linux", + "--python-version 38", + "--requirement tests/data/hash-requirements.txt" + ], + "notice": "Dependency tree generated with python-inspector.\npython-inspector is a free software tool from nexB Inc. 
and others.\nVisit https://github.com/aboutcode-org/python-inspector/ for support and download.", + "warnings": [], + "errors": [] + }, + "files": [ + { + "type": "file", + "path": "tests/data/hash-requirements.txt", + "package_data": [ + { + "type": "pypi", + "namespace": null, + "name": null, + "version": null, + "qualifiers": {}, + "subpath": null, + "primary_language": "Python", + "description": null, + "release_date": null, + "parties": [], + "keywords": [], + "homepage_url": null, + "download_url": null, + "size": null, + "sha1": null, + "md5": null, + "sha256": null, + "sha512": null, + "bug_tracking_url": null, + "code_view_url": null, + "vcs_url": null, + "copyright": null, + "license_expression": null, + "declared_license": null, + "notice_text": null, + "source_packages": [], + "file_references": [], + "extra_data": {}, + "dependencies": [ + { + "purl": "pkg:pypi/addict@2.4.0", + "extracted_requirement": "addict==2.4.0", + "scope": "install", + "is_runtime": true, + "is_optional": false, + "is_resolved": true, + "resolved_package": {}, + "extra_data": { + "is_editable": false, + "link": null, + "hash_options": [ + "sha256:249bb56bbfd3cdc2a004ea0ff4c2b6ddc84d53bc2194761636eb314d5cfa5dfc", + "sha256:b3b2210e0e067a281f5646c8c5db92e99b7231ea8b0eb5f74dbdf9e259d4e494" + ], + "is_constraint": false, + "is_archive": null, + "is_wheel": false, + "is_url": null, + "is_vcs_url": null, + "is_name_at_url": false, + "is_local_path": null + } + }, + { + "purl": "pkg:pypi/requests@2.25.1", + "extracted_requirement": "requests==2.25.1", + "scope": "install", + "is_runtime": true, + "is_optional": false, + "is_resolved": true, + "resolved_package": {}, + "extra_data": { + "is_editable": false, + "link": null, + "hash_options": [ + "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e", + "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804" + ], + "is_constraint": false, + "is_archive": null, + "is_wheel": false, + "is_url": null, + "is_vcs_url": null, + "is_name_at_url": false, + "is_local_path": null + } + } + ], + "repository_homepage_url": null, + "repository_download_url": null, + "api_data_url": null, + "datasource_id": "pip_requirements", + "purl": null + } + ] + } + ], + "packages": [ + { + "type": "pypi", + "namespace": null, + "name": "addict", + "version": "2.4.0", + "qualifiers": {}, + "subpath": null, + "primary_language": "Python", + "description": "Addict is a dictionary whose items can be set using both attribute and item syntax.\nAddict is a module that exposes a dictionary subclass that allows items to be set like attributes. Values are gettable and settable using both attribute and item syntax. 
For more info check out the README at 'github.com/mewwts/addict'.", + "release_date": "2020-11-21T16:21:29", + "parties": [ + { + "type": "person", + "role": "author", + "name": "Mats Julian Olsen", + "email": "mats@plysjbyen.net", + "url": null + } + ], + "keywords": [ + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Software Development :: Libraries :: Python Modules" + ], + "homepage_url": "https://github.com/mewwts/addict", + "download_url": "https://files.pythonhosted.org/packages/6a/00/b08f23b7d7e1e14ce01419a467b583edbb93c6cdb8654e54a9cc579cd61f/addict-2.4.0-py3-none-any.whl", + "size": 3832, + "sha1": null, + "md5": "513ea09baaf03437068be08b08ca36d1", + "sha256": "249bb56bbfd3cdc2a004ea0ff4c2b6ddc84d53bc2194761636eb314d5cfa5dfc", + "sha512": null, + "bug_tracking_url": null, + "code_view_url": null, + "vcs_url": null, + "copyright": null, + "license_expression": null, + "declared_license": { + "classifiers": [ + "License :: OSI Approved :: MIT License" + ] + }, + "notice_text": null, + "source_packages": [], + "file_references": [], + "extra_data": {}, + "dependencies": [], + "repository_homepage_url": null, + "repository_download_url": null, + "api_data_url": "https://pypi.org/pypi/addict/2.4.0/json", + "datasource_id": null, + "purl": "pkg:pypi/addict@2.4.0" + }, + { + "type": "pypi", + "namespace": null, + "name": "certifi", + "version": "2025.8.3", + "qualifiers": {}, + "subpath": null, + "primary_language": "Python", + "description": "Python package for providing Mozilla's CA Bundle.\nCertifi: Python SSL Certificates\n================================\n\nCertifi provides Mozilla's carefully curated collection of Root Certificates for\nvalidating the trustworthiness of SSL certificates while verifying the identity\nof TLS hosts. It has been extracted from the `Requests`_ project.\n\nInstallation\n------------\n\n``certifi`` is available on PyPI. Simply install it with ``pip``::\n\n $ pip install certifi\n\nUsage\n-----\n\nTo reference the installed certificate authority (CA) bundle, you can use the\nbuilt-in function::\n\n >>> import certifi\n\n >>> certifi.where()\n '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'\n\nOr from the command line::\n\n $ python -m certifi\n /usr/local/lib/python3.7/site-packages/certifi/cacert.pem\n\nEnjoy!\n\n.. _`Requests`: https://requests.readthedocs.io/en/master/\n\nAddition/Removal of Certificates\n--------------------------------\n\nCertifi does not support any addition/removal or other modification of the\nCA trust store content. This project is intended to provide a reliable and\nhighly portable root of trust to python deployments. 
Look to upstream projects\nfor methods to use alternate trust.", + "release_date": "2025-08-03T03:07:45", + "parties": [ + { + "type": "person", + "role": "author", + "name": "Kenneth Reitz", + "email": "me@kennethreitz.com", + "url": null + } + ], + "keywords": [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Natural Language :: English", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9" + ], + "homepage_url": "https://github.com/certifi/python-certifi", + "download_url": "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", + "size": 161216, + "sha1": null, + "md5": "f9b6740cffcf397b47bc7fb7782b1354", + "sha256": "f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", + "sha512": null, + "bug_tracking_url": null, + "code_view_url": "https://github.com/certifi/python-certifi", + "vcs_url": null, + "copyright": null, + "license_expression": null, + "declared_license": { + "license": "MPL-2.0", + "classifiers": [ + "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)" + ] + }, + "notice_text": null, + "source_packages": [], + "file_references": [], + "extra_data": {}, + "dependencies": [], + "repository_homepage_url": null, + "repository_download_url": null, + "api_data_url": "https://pypi.org/pypi/certifi/2025.8.3/json", + "datasource_id": null, + "purl": "pkg:pypi/certifi@2025.8.3" + }, + { + "type": "pypi", + "namespace": null, + "name": "chardet", + "version": "4.0.0", + "qualifiers": {}, + "subpath": null, + "primary_language": "Python", + "description": "Universal encoding detector for Python 2 and 3\nChardet: The Universal Character Encoding Detector\n--------------------------------------------------\n\n.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg\n :alt: Build status\n :target: https://travis-ci.org/chardet/chardet\n\n.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg\n :target: https://coveralls.io/r/chardet/chardet\n\n.. image:: https://img.shields.io/pypi/v/chardet.svg\n :target: https://warehouse.python.org/project/chardet/\n :alt: Latest version on PyPI\n\n.. image:: https://img.shields.io/pypi/l/chardet.svg\n :alt: License\n\n\nDetects\n - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants)\n - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese)\n - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese)\n - EUC-KR, ISO-2022-KR (Korean)\n - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic)\n - ISO-8859-5, windows-1251 (Bulgarian)\n - ISO-8859-1, windows-1252 (Western European languages)\n - ISO-8859-7, windows-1253 (Greek)\n - ISO-8859-8, windows-1255 (Visual and Logical Hebrew)\n - TIS-620 (Thai)\n\n.. 
note::\n Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily\n disabled until we can retrain the models.\n\nRequires Python 2.7 or 3.5+.\n\nInstallation\n------------\n\nInstall from `PyPI `_::\n\n pip install chardet\n\nDocumentation\n-------------\n\nFor users, docs are now available at https://chardet.readthedocs.io/.\n\nCommand-line Tool\n-----------------\n\nchardet comes with a command-line script which reports on the encodings of one\nor more files::\n\n % chardetect somefile someotherfile\n somefile: windows-1252 with confidence 0.5\n someotherfile: ascii with confidence 1.0\n\nAbout\n-----\n\nThis is a continuation of Mark Pilgrim's excellent chardet. Previously, two\nversions needed to be maintained: one that supported python 2.x and one that\nsupported python 3.x. We've recently merged with `Ian Cordasco `_'s\n`charade `_ fork, so now we have one\ncoherent version that works for Python 2.7+ and 3.4+.\n\n:maintainer: Dan Blanchard", + "release_date": "2020-12-10T19:35:32", + "parties": [ + { + "type": "person", + "role": "author", + "name": "Mark Pilgrim", + "email": "mark@diveintomark.org", + "url": null + }, + { + "type": "person", + "role": "maintainer", + "name": "Daniel Blanchard", + "email": "dan.blanchard@gmail.com", + "url": null + } + ], + "keywords": [ + "encoding", + "i18n", + "xml", + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Text Processing :: Linguistic" + ], + "homepage_url": "https://github.com/chardet/chardet", + "download_url": "https://files.pythonhosted.org/packages/19/c7/fa589626997dd07bd87d9269342ccb74b1720384a4d739a1872bd84fbe68/chardet-4.0.0-py2.py3-none-any.whl", + "size": 178743, + "sha1": null, + "md5": "504627b9b4fcd44720d5aa1345e29cc7", + "sha256": "f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5", + "sha512": null, + "bug_tracking_url": null, + "code_view_url": null, + "vcs_url": null, + "copyright": null, + "license_expression": null, + "declared_license": { + "license": "LGPL", + "classifiers": [ + "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)" + ] + }, + "notice_text": null, + "source_packages": [], + "file_references": [], + "extra_data": {}, + "dependencies": [], + "repository_homepage_url": null, + "repository_download_url": null, + "api_data_url": "https://pypi.org/pypi/chardet/4.0.0/json", + "datasource_id": null, + "purl": "pkg:pypi/chardet@4.0.0" + }, + { + "type": "pypi", + "namespace": null, + "name": "idna", + "version": "2.10", + "qualifiers": {}, + "subpath": null, + "primary_language": "Python", + "description": "Internationalized Domain Names in Applications (IDNA)\n=====================================================\n\nSupport for the Internationalised Domain Names in Applications\n(IDNA) protocol as specified in `RFC 5891 `_.\nThis is the latest version of the protocol and is sometimes referred to 
as\n\u201cIDNA 2008\u201d.\n\nThis library also provides support for Unicode Technical Standard 46,\n`Unicode IDNA Compatibility Processing `_.\n\nThis acts as a suitable replacement for the \u201cencodings.idna\u201d module that\ncomes with the Python standard library, but only supports the\nold, deprecated IDNA specification (`RFC 3490 `_).\n\nBasic functions are simply executed:\n\n.. code-block:: pycon\n\n # Python 3\n >>> import idna\n >>> idna.encode('\u30c9\u30e1\u30a4\u30f3.\u30c6\u30b9\u30c8')\n b'xn--eckwd4c7c.xn--zckzah'\n >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))\n \u30c9\u30e1\u30a4\u30f3.\u30c6\u30b9\u30c8\n\n # Python 2\n >>> import idna\n >>> idna.encode(u'\u30c9\u30e1\u30a4\u30f3.\u30c6\u30b9\u30c8')\n 'xn--eckwd4c7c.xn--zckzah'\n >>> print idna.decode('xn--eckwd4c7c.xn--zckzah')\n \u30c9\u30e1\u30a4\u30f3.\u30c6\u30b9\u30c8\n\nPackages\n--------\n\nThe latest tagged release version is published in the PyPI repository:\n\n.. image:: https://badge.fury.io/py/idna.svg\n :target: http://badge.fury.io/py/idna\n\n\nInstallation\n------------\n\nTo install this library, you can use pip:\n\n.. code-block:: bash\n\n $ pip install idna\n\nAlternatively, you can install the package using the bundled setup script:\n\n.. code-block:: bash\n\n $ python setup.py install\n\nThis library works with Python 2.7 and Python 3.4 or later.\n\n\nUsage\n-----\n\nFor typical usage, the ``encode`` and ``decode`` functions will take a domain\nname argument and perform a conversion to A-labels or U-labels respectively.\n\n.. code-block:: pycon\n\n # Python 3\n >>> import idna\n >>> idna.encode('\u30c9\u30e1\u30a4\u30f3.\u30c6\u30b9\u30c8')\n b'xn--eckwd4c7c.xn--zckzah'\n >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))\n \u30c9\u30e1\u30a4\u30f3.\u30c6\u30b9\u30c8\n\nYou may use the codec encoding and decoding methods using the\n``idna.codec`` module:\n\n.. code-block:: pycon\n\n # Python 2\n >>> import idna.codec\n >>> print u'\u0434\u043e\u043c\u0435\u043d\u0430.\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435'.encode('idna')\n xn--80ahd1agd.xn--80akhbyknj4f\n >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna')\n \u0434\u043e\u043c\u0435\u043d\u0430.\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435\n\nConversions can be applied at a per-label basis using the ``ulabel`` or ``alabel``\nfunctions if necessary:\n\n.. code-block:: pycon\n\n # Python 2\n >>> idna.alabel(u'\u6d4b\u8bd5')\n 'xn--0zwm56d'\n\nCompatibility Mapping (UTS #46)\n+++++++++++++++++++++++++++++++\n\nAs described in `RFC 5895 `_, the IDNA\nspecification no longer normalizes input from different potential ways a user\nmay input a domain name. This functionality, known as a \u201cmapping\u201d, is now\nconsidered by the specification to be a local user-interface issue distinct\nfrom IDNA conversion functionality.\n\nThis library provides one such mapping, that was developed by the Unicode\nConsortium. Known as `Unicode IDNA Compatibility Processing `_,\nit provides for both a regular mapping for typical applications, as well as\na transitional mapping to help migrate from older IDNA 2003 applications.\n\nFor example, \u201cK\u00f6nigsg\u00e4\u00dfchen\u201d is not a permissible label as *LATIN CAPITAL\nLETTER K* is not allowed (nor are capital letters in general). UTS 46 will\nconvert this into lower case prior to applying the IDNA conversion.\n\n.. 
code-block:: pycon\n\n # Python 3\n >>> import idna\n >>> idna.encode(u'K\u00f6nigsg\u00e4\u00dfchen')\n ...\n idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'K\u00f6nigsg\u00e4\u00dfchen' not allowed\n >>> idna.encode('K\u00f6nigsg\u00e4\u00dfchen', uts46=True)\n b'xn--knigsgchen-b4a3dun'\n >>> print(idna.decode('xn--knigsgchen-b4a3dun'))\n k\u00f6nigsg\u00e4\u00dfchen\n\nTransitional processing provides conversions to help transition from the older\n2003 standard to the current standard. For example, in the original IDNA\nspecification, the *LATIN SMALL LETTER SHARP S* (\u00df) was converted into two\n*LATIN SMALL LETTER S* (ss), whereas in the current IDNA specification this\nconversion is not performed.\n\n.. code-block:: pycon\n\n # Python 2\n >>> idna.encode(u'K\u00f6nigsg\u00e4\u00dfchen', uts46=True, transitional=True)\n 'xn--knigsgsschen-lcb0w'\n\nImplementors should use transitional processing with caution, only in rare\ncases where conversion from legacy labels to current labels must be performed\n(i.e. IDNA implementations that pre-date 2008). For typical applications\nthat just need to convert labels, transitional processing is unlikely to be\nbeneficial and could produce unexpected incompatible results.\n\n``encodings.idna`` Compatibility\n++++++++++++++++++++++++++++++++\n\nFunction calls from the Python built-in ``encodings.idna`` module are\nmapped to their IDNA 2008 equivalents using the ``idna.compat`` module.\nSimply substitute the ``import`` clause in your code to refer to the\nnew module name.\n\nExceptions\n----------\n\nAll errors raised during the conversion following the specification should\nraise an exception derived from the ``idna.IDNAError`` base class.\n\nMore specific exceptions that may be generated as ``idna.IDNABidiError``\nwhen the error reflects an illegal combination of left-to-right and right-to-left\ncharacters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is\nan illegal character in an IDN label (i.e. INVALID); and ``idna.InvalidCodepointContext``\nwhen the codepoint is illegal based on its positional context (i.e. it is CONTEXTO\nor CONTEXTJ but the contextual requirements are not satisfied.)\n\nBuilding and Diagnostics\n------------------------\n\nThe IDNA and UTS 46 functionality relies upon pre-calculated lookup tables for\nperformance. These tables are derived from computing against eligibility criteria\nin the respective standards. These tables are computed using the command-line\nscript ``tools/idna-data``.\n\nThis tool will fetch relevant tables from the Unicode Consortium and perform the\nrequired calculations to identify eligibility. It has three main modes:\n\n* ``idna-data make-libdata``. Generates ``idnadata.py`` and ``uts46data.py``,\n the pre-calculated lookup tables using for IDNA and UTS 46 conversions. Implementors\n who wish to track this library against a different Unicode version may use this tool\n to manually generate a different version of the ``idnadata.py`` and ``uts46data.py``\n files.\n\n* ``idna-data make-table``. Generate a table of the IDNA disposition\n (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix B.1 of RFC\n 5892 and the pre-computed tables published by `IANA `_.\n\n* ``idna-data U+0061``. Prints debugging output on the various properties\n associated with an individual Unicode codepoint (in this case, U+0061), that are\n used to assess the IDNA and UTS 46 status of a codepoint. 
This is helpful in debugging\n or analysis.\n\nThe tool accepts a number of arguments, described using ``idna-data -h``. Most notably,\nthe ``--version`` argument allows the specification of the version of Unicode to use\nin computing the table data. For example, ``idna-data --version 9.0.0 make-libdata``\nwill generate library data against Unicode 9.0.0.\n\nNote that this script requires Python 3, but all generated library data will work\nin Python 2.7.\n\n\nTesting\n-------\n\nThe library has a test suite based on each rule of the IDNA specification, as\nwell as tests that are provided as part of the Unicode Technical Standard 46,\n`Unicode IDNA Compatibility Processing `_.\n\nThe tests are run automatically on each commit at Travis CI:\n\n.. image:: https://travis-ci.org/kjd/idna.svg?branch=master\n :target: https://travis-ci.org/kjd/idna", + "release_date": "2020-06-27T23:45:03", + "parties": [ + { + "type": "person", + "role": "author", + "name": "Kim Davies", + "email": "kim@cynosure.com.au", + "url": null + } + ], + "keywords": [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Internet :: Name Service (DNS)", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Utilities" + ], + "homepage_url": "https://github.com/kjd/idna", + "download_url": "https://files.pythonhosted.org/packages/a2/38/928ddce2273eaa564f6f50de919327bf3a00f091b5baba8dfa9460f3a8a8/idna-2.10-py2.py3-none-any.whl", + "size": 58811, + "sha1": null, + "md5": "8f7d13f63706aa265d31ffedc8aa3053", + "sha256": "b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0", + "sha512": null, + "bug_tracking_url": null, + "code_view_url": null, + "vcs_url": null, + "copyright": null, + "license_expression": null, + "declared_license": { + "license": "BSD-like", + "classifiers": [ + "License :: OSI Approved :: BSD License" + ] + }, + "notice_text": null, + "source_packages": [], + "file_references": [], + "extra_data": {}, + "dependencies": [], + "repository_homepage_url": null, + "repository_download_url": null, + "api_data_url": "https://pypi.org/pypi/idna/2.10/json", + "datasource_id": null, + "purl": "pkg:pypi/idna@2.10" + }, + { + "type": "pypi", + "namespace": null, + "name": "requests", + "version": "2.25.1", + "qualifiers": {}, + "subpath": null, + "primary_language": "Python", + "description": "Python HTTP for Humans.\n# Requests\n\n**Requests** is a simple, yet elegant HTTP library.\n\n```python\n>>> import requests\n>>> r = requests.get('https://api.github.com/user', auth=('user', 'pass'))\n>>> r.status_code\n200\n>>> r.headers['content-type']\n'application/json; charset=utf8'\n>>> r.encoding\n'utf-8'\n>>> r.text\n'{\"type\":\"User\"...'\n>>> r.json()\n{'disk_usage': 368627, 'private_gists': 484, ...}\n```\n\nRequests allows you to send HTTP/1.1 requests extremely easily. 
There\u2019s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data \u2014 but nowadays, just use the `json` method!\n\nRequests is one of the most downloaded Python package today, pulling in around `14M downloads / week`\u2014 according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `500,000+` repositories. You may certainly put your trust in this code.\n\n[![Downloads](https://pepy.tech/badge/requests/month)](https://pepy.tech/project/requests/month)\n[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)\n[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)\n\n## Installing Requests and Supported Versions\n\nRequests is available on PyPI:\n\n```console\n$ python -m pip install requests\n```\n\nRequests officially supports Python 2.7 & 3.5+.\n\n## Supported Features & Best\u2013Practices\n\nRequests is ready for the demands of building robust and reliable HTTP\u2013speaking applications, for the needs of today.\n\n- Keep-Alive & Connection Pooling\n- International Domains and URLs\n- Sessions with Cookie Persistence\n- Browser-style TLS/SSL Verification\n- Basic & Digest Authentication\n- Familiar `dict`\u2013like Cookies\n- Automatic Content Decompression and Decoding\n- Multi-part File Uploads\n- SOCKS Proxy Support\n- Connection Timeouts\n- Streaming Downloads\n- Automatic honoring of `.netrc`\n- Chunked HTTP Requests\n\n## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)\n\n[![Read the Docs](https://raw.githubusercontent.com/psf/requests/master/ext/ss.png)](https://requests.readthedocs.io)\n\n---\n\n[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/master/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/master/ext/psf.png)](https://www.python.org/psf)", + "release_date": "2020-12-16T19:38:34", + "parties": [ + { + "type": "person", + "role": "author", + "name": "Kenneth Reitz", + "email": "me@kennethreitz.org", + "url": null + } + ], + "keywords": [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Natural Language :: English", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy" + ], + "homepage_url": "https://requests.readthedocs.io", + "download_url": "https://files.pythonhosted.org/packages/29/c1/24814557f1d22c56d50280771a17307e6bf87b70727d975fd6b2ce6b014a/requests-2.25.1-py2.py3-none-any.whl", + "size": 61216, + "sha1": null, + "md5": "ec79209809129bf13cb002a0a573ef45", + "sha256": "c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e", + "sha512": null, + "bug_tracking_url": null, + "code_view_url": "https://github.com/psf/requests", + "vcs_url": null, + "copyright": null, + "license_expression": null, + "declared_license": { + "license": "Apache 2.0", + "classifiers": [ 
+ "License :: OSI Approved :: Apache Software License" + ] + }, + "notice_text": null, + "source_packages": [], + "file_references": [], + "extra_data": {}, + "dependencies": [], + "repository_homepage_url": null, + "repository_download_url": null, + "api_data_url": "https://pypi.org/pypi/requests/2.25.1/json", + "datasource_id": null, + "purl": "pkg:pypi/requests@2.25.1" + }, + { + "type": "pypi", + "namespace": null, + "name": "urllib3", + "version": "1.26.20", + "qualifiers": {}, + "subpath": null, + "primary_language": "Python", + "description": "HTTP library with thread-safe connection pooling, file post, and more.\nurllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the\nPython ecosystem already uses urllib3 and you should too.\nurllib3 brings many critical features that are missing from the Python\nstandard libraries:\n\n- Thread safety.\n- Connection pooling.\n- Client-side SSL/TLS verification.\n- File uploads with multipart encoding.\n- Helpers for retrying requests and dealing with HTTP redirects.\n- Support for gzip, deflate, and brotli encoding.\n- Proxy support for HTTP and SOCKS.\n- 100% test coverage.\n\nurllib3 is powerful and easy to use:\n\n.. code-block:: python\n\n >>> import urllib3\n >>> http = urllib3.PoolManager()\n >>> r = http.request('GET', 'http://httpbin.org/robots.txt')\n >>> r.status\n 200\n >>> r.data\n 'User-agent: *\\nDisallow: /deny\\n'\n\n\nInstalling\n----------\n\nurllib3 can be installed with `pip `_::\n\n $ python -m pip install urllib3\n\nAlternatively, you can grab the latest source code from `GitHub `_::\n\n $ git clone https://github.com/urllib3/urllib3.git\n $ cd urllib3\n $ git checkout 1.26.x\n $ pip install .\n\n\nDocumentation\n-------------\n\nurllib3 has usage and reference documentation at `urllib3.readthedocs.io `_.\n\n\nContributing\n------------\n\nurllib3 happily accepts contributions. Please see our\n`contributing documentation `_\nfor some tips on getting started.\n\n\nSecurity Disclosures\n--------------------\n\nTo report a security vulnerability, please use the\n`Tidelift security contact `_.\nTidelift will coordinate the fix and disclosure with maintainers.\n\n\nMaintainers\n-----------\n\n- `@sethmlarson `__ (Seth M. Larson)\n- `@pquentin `__ (Quentin Pradet)\n- `@theacodes `__ (Thea Flowers)\n- `@haikuginger `__ (Jess Shapiro)\n- `@lukasa `__ (Cory Benfield)\n- `@sigmavirus24 `__ (Ian Stapleton Cordasco)\n- `@shazow `__ (Andrey Petrov)\n\n\ud83d\udc4b\n\n\nSponsorship\n-----------\n\nIf your company benefits from this library, please consider `sponsoring its\ndevelopment `_.\n\n\nFor Enterprise\n--------------\n\n.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png\n :width: 75\n :alt: Tidelift\n\n.. list-table::\n :widths: 10 100\n\n * - |tideliftlogo|\n - Professional support for urllib3 is available as part of the `Tidelift\n Subscription`_. Tidelift gives software development teams a single source for\n purchasing and maintaining their software, with professional grade assurances\n from the experts who know it best, while seamlessly integrating with existing\n tools.\n\n.. 
_Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme\n\n\nChanges\n=======\n\n1.26.20 (2024-08-29)\n--------------------\n\n* Fixed a crash where certain standard library hash functions were absent in\n FIPS-compliant environments.\n (`#3432 `__)\n* Replaced deprecated dash-separated setuptools entries in ``setup.cfg``.\n (`#3461 `__)\n* Took into account macOS setting ``ECONNRESET`` instead of ``EPROTOTYPE`` in\n its newer versions.\n (`#3416 `__)\n* Backported changes to our tests and CI configuration from v2.x to support\n testing with CPython 3.12 and 3.13.\n (`#3436 `__)\n\n\n1.26.19 (2024-06-17)\n--------------------\n\n* Added the ``Proxy-Authorization`` header to the list of headers to strip from requests when redirecting to a different host. As before, different headers can be set via ``Retry.remove_headers_on_redirect``.\n* Fixed handling of OpenSSL 3.2.0 new error message for misconfiguring an HTTP proxy as HTTPS. (`#3405 `__)\n\n\n1.26.18 (2023-10-17)\n--------------------\n\n* Made body stripped from HTTP requests changing the request method to GET after HTTP 303 \"See Other\" redirect responses.\n\n\n1.26.17 (2023-10-02)\n--------------------\n\n* Added the ``Cookie`` header to the list of headers to strip from requests when redirecting to a different host. As before, different headers can be set via ``Retry.remove_headers_on_redirect``.\n\n\n1.26.16 (2023-05-23)\n--------------------\n\n* Fixed thread-safety issue where accessing a ``PoolManager`` with many distinct origins\n would cause connection pools to be closed while requests are in progress (`#2954 `_)\n\n\n1.26.15 (2023-03-10)\n--------------------\n\n* Fix socket timeout value when ``HTTPConnection`` is reused (`#2645 `__)\n* Remove \"!\" character from the unreserved characters in IPv6 Zone ID parsing\n (`#2899 `__)\n* Fix IDNA handling of '\\x80' byte (`#2901 `__)\n\n1.26.14 (2023-01-11)\n--------------------\n\n* Fixed parsing of port 0 (zero) returning None, instead of 0. (`#2850 `__)\n* Removed deprecated getheaders() calls in contrib module.\n\n1.26.13 (2022-11-23)\n--------------------\n\n* Deprecated the ``HTTPResponse.getheaders()`` and ``HTTPResponse.getheader()`` methods.\n* Fixed an issue where parsing a URL with leading zeroes in the port would be rejected\n even when the port number after removing the zeroes was valid.\n* Fixed a deprecation warning when using cryptography v39.0.0.\n* Removed the ``<4`` in the ``Requires-Python`` packaging metadata field.\n\n\n1.26.12 (2022-08-22)\n--------------------\n\n* Deprecated the `urllib3[secure]` extra and the `urllib3.contrib.pyopenssl` module.\n Both will be removed in v2.x. 
See this `GitHub issue `_\n for justification and info on how to migrate.\n\n\n1.26.11 (2022-07-25)\n--------------------\n\n* Fixed an issue where reading more than 2 GiB in a call to ``HTTPResponse.read`` would\n raise an ``OverflowError`` on Python 3.9 and earlier.\n\n\n1.26.10 (2022-07-07)\n--------------------\n\n* Removed support for Python 3.5\n* Fixed an issue where a ``ProxyError`` recommending configuring the proxy as HTTP\n instead of HTTPS could appear even when an HTTPS proxy wasn't configured.\n\n\n1.26.9 (2022-03-16)\n-------------------\n\n* Changed ``urllib3[brotli]`` extra to favor installing Brotli libraries that are still\n receiving updates like ``brotli`` and ``brotlicffi`` instead of ``brotlipy``.\n This change does not impact behavior of urllib3, only which dependencies are installed.\n* Fixed a socket leaking when ``HTTPSConnection.connect()`` raises an exception.\n* Fixed ``server_hostname`` being forwarded from ``PoolManager`` to ``HTTPConnectionPool``\n when requesting an HTTP URL. Should only be forwarded when requesting an HTTPS URL.\n\n\n1.26.8 (2022-01-07)\n-------------------\n\n* Added extra message to ``urllib3.exceptions.ProxyError`` when urllib3 detects that\n a proxy is configured to use HTTPS but the proxy itself appears to only use HTTP.\n* Added a mention of the size of the connection pool when discarding a connection due to the pool being full.\n* Added explicit support for Python 3.11.\n* Deprecated the ``Retry.MAX_BACKOFF`` class property in favor of ``Retry.DEFAULT_MAX_BACKOFF``\n to better match the rest of the default parameter names. ``Retry.MAX_BACKOFF`` is removed in v2.0.\n* Changed location of the vendored ``ssl.match_hostname`` function from ``urllib3.packages.ssl_match_hostname``\n to ``urllib3.util.ssl_match_hostname`` to ensure Python 3.10+ compatibility after being repackaged\n by downstream distributors.\n* Fixed absolute imports, all imports are now relative.\n\n\n1.26.7 (2021-09-22)\n-------------------\n\n* Fixed a bug with HTTPS hostname verification involving IP addresses and lack\n of SNI. (Issue #2400)\n* Fixed a bug where IPv6 braces weren't stripped during certificate hostname\n matching. (Issue #2240)\n\n\n1.26.6 (2021-06-25)\n-------------------\n\n* Deprecated the ``urllib3.contrib.ntlmpool`` module. urllib3 is not able to support\n it properly due to `reasons listed in this issue `_.\n If you are a user of this module please leave a comment.\n* Changed ``HTTPConnection.request_chunked()`` to not erroneously emit multiple\n ``Transfer-Encoding`` headers in the case that one is already specified.\n* Fixed typo in deprecation message to recommend ``Retry.DEFAULT_ALLOWED_METHODS``.\n\n\n1.26.5 (2021-05-26)\n-------------------\n\n* Fixed deprecation warnings emitted in Python 3.10.\n* Updated vendored ``six`` library to 1.16.0.\n* Improved performance of URL parser when splitting\n the authority component.\n\n\n1.26.4 (2021-03-15)\n-------------------\n\n* Changed behavior of the default ``SSLContext`` when connecting to HTTPS proxy\n during HTTPS requests. The default ``SSLContext`` now sets ``check_hostname=True``.\n\n\n1.26.3 (2021-01-26)\n-------------------\n\n* Fixed bytes and string comparison issue with headers (Pull #2141)\n\n* Changed ``ProxySchemeUnknown`` error message to be\n more actionable if the user supplies a proxy URL without\n a scheme. 
(Pull #2107)\n\n\n1.26.2 (2020-11-12)\n-------------------\n\n* Fixed an issue where ``wrap_socket`` and ``CERT_REQUIRED`` wouldn't\n be imported properly on Python 2.7.8 and earlier (Pull #2052)\n\n\n1.26.1 (2020-11-11)\n-------------------\n\n* Fixed an issue where two ``User-Agent`` headers would be sent if a\n ``User-Agent`` header key is passed as ``bytes`` (Pull #2047)\n\n\n1.26.0 (2020-11-10)\n-------------------\n\n* **NOTE: urllib3 v2.0 will drop support for Python 2**.\n `Read more in the v2.0 Roadmap `_.\n\n* Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)\n\n* Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that\n still wish to use TLS earlier than 1.2 without a deprecation warning\n should opt-in explicitly by setting ``ssl_version=ssl.PROTOCOL_TLSv1_1`` (Pull #2002)\n **Starting in urllib3 v2.0: Connections that receive a ``DeprecationWarning`` will fail**\n\n* Deprecated ``Retry`` options ``Retry.DEFAULT_METHOD_WHITELIST``, ``Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST``\n and ``Retry(method_whitelist=...)`` in favor of ``Retry.DEFAULT_ALLOWED_METHODS``,\n ``Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT``, and ``Retry(allowed_methods=...)``\n (Pull #2000) **Starting in urllib3 v2.0: Deprecated options will be removed**\n\n* Added default ``User-Agent`` header to every request (Pull #1750)\n\n* Added ``urllib3.util.SKIP_HEADER`` for skipping ``User-Agent``, ``Accept-Encoding``,\n and ``Host`` headers from being automatically emitted with requests (Pull #2018)\n\n* Collapse ``transfer-encoding: chunked`` request data and framing into\n the same ``socket.send()`` call (Pull #1906)\n\n* Send ``http/1.1`` ALPN identifier with every TLS handshake by default (Pull #1894)\n\n* Properly terminate SecureTransport connections when CA verification fails (Pull #1977)\n\n* Don't emit an ``SNIMissingWarning`` when passing ``server_hostname=None``\n to SecureTransport (Pull #1903)\n\n* Disabled requesting TLSv1.2 session tickets as they weren't being used by urllib3 (Pull #1970)\n\n* Suppress ``BrokenPipeError`` when writing request body after the server\n has closed the socket (Pull #1524)\n\n* Wrap ``ssl.SSLError`` that can be raised from reading a socket (e.g. \"bad MAC\")\n into an ``urllib3.exceptions.SSLError`` (Pull #1939)\n\n\n1.25.11 (2020-10-19)\n--------------------\n\n* Fix retry backoff time parsed from ``Retry-After`` header when given\n in the HTTP date format. The HTTP date was parsed as the local timezone\n rather than accounting for the timezone in the HTTP date (typically\n UTC) (Pull #1932, Pull #1935, Pull #1938, Pull #1949)\n\n* Fix issue where an error would be raised when the ``SSLKEYLOGFILE``\n environment variable was set to the empty string. 
Now ``SSLContext.keylog_file``\n is not set in this situation (Pull #2016)\n\n\n1.25.10 (2020-07-22)\n--------------------\n\n* Added support for ``SSLKEYLOGFILE`` environment variable for\n logging TLS session keys with use with programs like\n Wireshark for decrypting captured web traffic (Pull #1867)\n\n* Fixed loading of SecureTransport libraries on macOS Big Sur\n due to the new dynamic linker cache (Pull #1905)\n\n* Collapse chunked request bodies data and framing into one\n call to ``send()`` to reduce the number of TCP packets by 2-4x (Pull #1906)\n\n* Don't insert ``None`` into ``ConnectionPool`` if the pool\n was empty when requesting a connection (Pull #1866)\n\n* Avoid ``hasattr`` call in ``BrotliDecoder.decompress()`` (Pull #1858)\n\n\n1.25.9 (2020-04-16)\n-------------------\n\n* Added ``InvalidProxyConfigurationWarning`` which is raised when\n erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently\n support connecting to HTTPS proxies but will soon be able to\n and we would like users to migrate properly without much breakage.\n\n See `this GitHub issue `_\n for more information on how to fix your proxy config. (Pull #1851)\n\n* Drain connection after ``PoolManager`` redirect (Pull #1817)\n\n* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)\n\n* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)\n\n* Allow the CA certificate data to be passed as a string (Pull #1804)\n\n* Raise ``ValueError`` if method contains control characters (Pull #1800)\n\n* Add ``__repr__`` to ``Timeout`` (Pull #1795)\n\n\n1.25.8 (2020-01-20)\n-------------------\n\n* Drop support for EOL Python 3.4 (Pull #1774)\n\n* Optimize _encode_invalid_chars (Pull #1787)\n\n\n1.25.7 (2019-11-11)\n-------------------\n\n* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)\n\n* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)\n\n* Fix issue where URL fragment was sent within the request target. (Pull #1732)\n\n* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)\n\n* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)\n\n\n1.25.6 (2019-09-24)\n-------------------\n\n* Fix issue where tilde (``~``) characters were incorrectly\n percent-encoded in the path. (Pull #1692)\n\n\n1.25.5 (2019-09-19)\n-------------------\n\n* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which\n caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.\n (Issue #1682)\n\n\n1.25.4 (2019-09-19)\n-------------------\n\n* Propagate Retry-After header settings to subsequent retries. (Pull #1607)\n\n* Fix edge case where Retry-After header was still respected even when\n explicitly opted out of. (Pull #1607)\n\n* Remove dependency on ``rfc3986`` for URL parsing.\n\n* Fix issue where URLs containing invalid characters within ``Url.auth`` would\n raise an exception instead of percent-encoding those characters.\n\n* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses\n work well with BufferedReaders and other ``io`` module features. (Pull #1652)\n\n* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)\n\n\n1.25.3 (2019-05-23)\n-------------------\n\n* Change ``HTTPSConnection`` to load system CA certificates\n when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are\n unspecified. (Pull #1608, Issue #1603)\n\n* Upgrade bundled rfc3986 to v1.3.2. 
(Pull #1609, Issue #1605)\n\n\n1.25.2 (2019-04-28)\n-------------------\n\n* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)\n\n* Change ``parse_url`` to percent-encode invalid characters within the\n path, query, and target components. (Pull #1586)\n\n\n1.25.1 (2019-04-24)\n-------------------\n\n* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)\n\n* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)\n\n\n1.25 (2019-04-22)\n-----------------\n\n* Require and validate certificates by default when using HTTPS (Pull #1507)\n\n* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)\n\n* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use\n encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489)\n\n* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``\n implementations. (Pull #1496)\n\n* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, Pull #1492)\n\n* Fixed issue where OpenSSL would block if an encrypted client private key was\n given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)\n\n* Added support for Brotli content encoding. It is enabled automatically if\n ``brotlipy`` package is installed which can be requested with\n ``urllib3[brotli]`` extra. (Pull #1532)\n\n* Drop ciphers using DSS key exchange from default TLS cipher suites.\n Improve default ciphers when using SecureTransport. (Pull #1496)\n\n* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)\n\n1.24.3 (2019-05-01)\n-------------------\n\n* Apply fix for CVE-2019-9740. (Pull #1591)\n\n1.24.2 (2019-04-17)\n-------------------\n\n* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or\n ``ssl_context`` parameters are specified.\n\n* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)\n\n* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)\n\n\n1.24.1 (2018-11-02)\n-------------------\n\n* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)\n\n* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)\n\n\n1.24 (2018-10-16)\n-----------------\n\n* Allow key_server_hostname to be specified when initializing a PoolManager to allow custom SNI to be overridden. (Pull #1449)\n\n* Test against Python 3.7 on AppVeyor. (Pull #1453)\n\n* Early-out ipv6 checks when running on App Engine. (Pull #1450)\n\n* Change ambiguous description of backoff_factor (Pull #1436)\n\n* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)\n\n* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).\n\n* Add a server_hostname parameter to HTTPSConnection which allows for\n overriding the SNI hostname sent in the handshake. (Pull #1397)\n\n* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)\n\n* Fixed bug where responses with header Content-Type: message/* erroneously\n raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)\n\n* Move urllib3 to src/urllib3 (Pull #1409)\n\n\n1.23 (2018-06-04)\n-----------------\n\n* Allow providing a list of headers to strip from requests when redirecting\n to a different host. Defaults to the ``Authorization`` header. Different\n headers can be set via ``Retry.remove_headers_on_redirect``. 
(Issue #1316)\n\n* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).\n\n* Dropped Python 3.3 support. (Pull #1242)\n\n* Put the connection back in the pool when calling stream() or read_chunked() on\n a chunked HEAD response. (Issue #1234)\n\n* Fixed pyOpenSSL-specific ssl client authentication issue when clients\n attempted to auth via certificate + chain (Issue #1060)\n\n* Add the port to the connectionpool connect print (Pull #1251)\n\n* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)\n\n* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)\n\n* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)\n\n* Added support for auth info in url for SOCKS proxy (Pull #1363)\n\n\n1.22 (2017-07-20)\n-----------------\n\n* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via\n IPv6 proxy. (Issue #1222)\n\n* Made the connection pool retry on ``SSLError``. The original ``SSLError``\n is available on ``MaxRetryError.reason``. (Issue #1112)\n\n* Drain and release connection before recursing on retry/redirect. Fixes\n deadlocks with a blocking connectionpool. (Issue #1167)\n\n* Fixed compatibility for cookiejar. (Issue #1229)\n\n* pyopenssl: Use vendored version of ``six``. (Issue #1231)\n\n\n1.21.1 (2017-05-02)\n-------------------\n\n* Fixed SecureTransport issue that would cause long delays in response body\n delivery. (Pull #1154)\n\n* Fixed regression in 1.21 that threw exceptions when users passed the\n ``socket_options`` flag to the ``PoolManager``. (Issue #1165)\n\n* Fixed regression in 1.21 that threw exceptions when users passed the\n ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``.\n (Pull #1157)\n\n\n1.21 (2017-04-25)\n-----------------\n\n* Improved performance of certain selector system calls on Python 3.5 and\n later. (Pull #1095)\n\n* Resolved issue where the PyOpenSSL backend would not wrap SysCallError\n exceptions appropriately when sending data. (Pull #1125)\n\n* Selectors now detects a monkey-patched select module after import for modules\n that patch the select module like eventlet, greenlet. (Pull #1128)\n\n* Reduced memory consumption when streaming zlib-compressed responses\n (as opposed to raw deflate streams). (Pull #1129)\n\n* Connection pools now use the entire request context when constructing the\n pool key. (Pull #1016)\n\n* ``PoolManager.connection_from_*`` methods now accept a new keyword argument,\n ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``.\n (Pull #1016)\n\n* Add retry counter for ``status_forcelist``. (Issue #1147)\n\n* Added ``contrib`` module for using SecureTransport on macOS:\n ``urllib3.contrib.securetransport``. (Pull #1122)\n\n* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes:\n for schemes it does not recognise, it assumes they are case-sensitive and\n leaves them unchanged.\n (Issue #1080)\n\n\n1.20 (2017-01-19)\n-----------------\n\n* Added support for waiting for I/O using selectors other than select,\n improving urllib3's behaviour with large numbers of concurrent connections.\n (Pull #1001)\n\n* Updated the date for the system clock check. (Issue #1005)\n\n* ConnectionPools now correctly consider hostnames to be case-insensitive.\n (Issue #1032)\n\n* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module\n to fail when it is injected, rather than at first use. 
(Pull #1063)\n\n* Outdated versions of cryptography now cause the PyOpenSSL contrib module\n to fail when it is injected, rather than at first use. (Issue #1044)\n\n* Automatically attempt to rewind a file-like body object when a request is\n retried or redirected. (Pull #1039)\n\n* Fix some bugs that occur when modules incautiously patch the queue module.\n (Pull #1061)\n\n* Prevent retries from occurring on read timeouts for which the request method\n was not in the method whitelist. (Issue #1059)\n\n* Changed the PyOpenSSL contrib module to lazily load idna to avoid\n unnecessarily bloating the memory of programs that don't need it. (Pull\n #1076)\n\n* Add support for IPv6 literals with zone identifiers. (Pull #1013)\n\n* Added support for socks5h:// and socks4a:// schemes when working with SOCKS\n proxies, and controlled remote DNS appropriately. (Issue #1035)\n\n\n1.19.1 (2016-11-16)\n-------------------\n\n* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025)\n\n\n1.19 (2016-11-03)\n-----------------\n\n* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when\n using the default retry logic. (Pull #955)\n\n* Remove markers from setup.py to assist ancient setuptools versions. (Issue\n #986)\n\n* Disallow superscripts and other integerish things in URL ports. (Issue #989)\n\n* Allow urllib3's HTTPResponse.stream() method to continue to work with\n non-httplib underlying FPs. (Pull #990)\n\n* Empty filenames in multipart headers are now emitted as such, rather than\n being suppressed. (Issue #1015)\n\n* Prefer user-supplied Host headers on chunked uploads. (Issue #1009)\n\n\n1.18.1 (2016-10-27)\n-------------------\n\n* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with\n PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This\n release fixes a vulnerability whereby urllib3 in the above configuration\n would silently fail to validate TLS certificates due to erroneously setting\n invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous\n flags do not cause a problem in OpenSSL versions before 1.1.0, which\n interprets the presence of any flag as requesting certificate validation.\n\n There is no PR for this patch, as it was prepared for simultaneous disclosure\n and release. The master branch received the same fix in Pull #1010.\n\n\n1.18 (2016-09-26)\n-----------------\n\n* Fixed incorrect message for IncompleteRead exception. (Pull #973)\n\n* Accept ``iPAddress`` subject alternative name fields in TLS certificates.\n (Issue #258)\n\n* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3.\n (Issue #977)\n\n* Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979)\n\n\n1.17 (2016-09-06)\n-----------------\n\n* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835)\n\n* ConnectionPool debug log now includes scheme, host, and port. (Issue #897)\n\n* Substantially refactored documentation. (Issue #887)\n\n* Used URLFetch default timeout on AppEngine, rather than hardcoding our own.\n (Issue #858)\n\n* Normalize the scheme and host in the URL parser (Issue #833)\n\n* ``HTTPResponse`` contains the last ``Retry`` object, which now also\n contains retries history. (Issue #848)\n\n* Timeout can no longer be set as boolean, and must be greater than zero.\n (Pull #924)\n\n* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We\n now use cryptography and idna, both of which are already dependencies of\n PyOpenSSL. 
(Pull #930)\n\n* Fixed infinite loop in ``stream`` when amt=None. (Issue #928)\n\n* Try to use the operating system's certificates when we are using an\n ``SSLContext``. (Pull #941)\n\n* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to\n ChaCha20, but ChaCha20 is then preferred to everything else. (Pull #947)\n\n* Updated cipher suite list to remove 3DES-based cipher suites. (Pull #958)\n\n* Removed the cipher suite fallback to allow HIGH ciphers. (Pull #958)\n\n* Implemented ``length_remaining`` to determine remaining content\n to be read. (Pull #949)\n\n* Implemented ``enforce_content_length`` to enable exceptions when\n incomplete data chunks are received. (Pull #949)\n\n* Dropped connection start, dropped connection reset, redirect, forced retry,\n and new HTTPS connection log levels to DEBUG, from INFO. (Pull #967)\n\n\n1.16 (2016-06-11)\n-----------------\n\n* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840)\n\n* Provide ``key_fn_by_scheme`` pool keying mechanism that can be\n overridden. (Issue #830)\n\n* Normalize scheme and host to lowercase for pool keys, and include\n ``source_address``. (Issue #830)\n\n* Cleaner exception chain in Python 3 for ``_make_request``.\n (Issue #861)\n\n* Fixed installing ``urllib3[socks]`` extra. (Issue #864)\n\n* Fixed signature of ``ConnectionPool.close`` so it can actually safely be\n called by subclasses. (Issue #873)\n\n* Retain ``release_conn`` state across retries. (Issues #651, #866)\n\n* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to\n ``HTTPResponse`` but can be replaced with a subclass. (Issue #879)\n\n\n1.15.1 (2016-04-11)\n-------------------\n\n* Fix packaging to include backports module. (Issue #841)\n\n\n1.15 (2016-04-06)\n-----------------\n\n* Added Retry(raise_on_status=False). (Issue #720)\n\n* Always use setuptools, no more distutils fallback. (Issue #785)\n\n* Dropped support for Python 3.2. (Issue #786)\n\n* Chunked transfer encoding when requesting with ``chunked=True``.\n (Issue #790)\n\n* Fixed regression with IPv6 port parsing. (Issue #801)\n\n* Append SNIMissingWarning messages to allow users to specify it in\n the PYTHONWARNINGS environment variable. (Issue #816)\n\n* Handle unicode headers in Py2. (Issue #818)\n\n* Log certificate when there is a hostname mismatch. (Issue #820)\n\n* Preserve order of request/response headers. (Issue #821)\n\n\n1.14 (2015-12-29)\n-----------------\n\n* contrib: SOCKS proxy support! (Issue #762)\n\n* Fixed AppEngine handling of transfer-encoding header and bug\n in Timeout defaults checking. (Issue #763)\n\n\n1.13.1 (2015-12-18)\n-------------------\n\n* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)\n\n\n1.13 (2015-12-14)\n-----------------\n\n* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)\n\n* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)\n\n* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)\n\n* Close connections more defensively on exception. (Issue #734)\n\n* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without\n repeatedly flushing the decoder, to function better on Jython. (Issue #743)\n\n* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)\n\n\n1.12 (2015-09-03)\n-----------------\n\n* Rely on ``six`` for importing ``httplib`` to work around\n conflicts with other Python 3 shims. (Issue #688)\n\n* Add support for directories of certificate authorities, as supported by\n OpenSSL. 
(Issue #701)\n\n* New exception: ``NewConnectionError``, raised when we fail to establish\n a new connection, usually ``ECONNREFUSED`` socket error.\n\n\n1.11 (2015-07-21)\n-----------------\n\n* When ``ca_certs`` is given, ``cert_reqs`` defaults to\n ``'CERT_REQUIRED'``. (Issue #650)\n\n* ``pip install urllib3[secure]`` will install Certifi and\n PyOpenSSL as dependencies. (Issue #678)\n\n* Made ``HTTPHeaderDict`` usable as a ``headers`` input value\n (Issues #632, #679)\n\n* Added `urllib3.contrib.appengine `_\n which has an ``AppEngineManager`` for using ``URLFetch`` in a\n Google AppEngine environment. (Issue #664)\n\n* Dev: Added test suite for AppEngine. (Issue #631)\n\n* Fix performance regression when using PyOpenSSL. (Issue #626)\n\n* Passing incorrect scheme (e.g. ``foo://``) will raise\n ``ValueError`` instead of ``AssertionError`` (backwards\n compatible for now, but please migrate). (Issue #640)\n\n* Fix pools not getting replenished when an error occurs during a\n request using ``release_conn=False``. (Issue #644)\n\n* Fix pool-default headers not applying for url-encoded requests\n like GET. (Issue #657)\n\n* log.warning in Python 3 when headers are skipped due to parsing\n errors. (Issue #642)\n\n* Close and discard connections if an error occurs during read.\n (Issue #660)\n\n* Fix host parsing for IPv6 proxies. (Issue #668)\n\n* Separate warning type SubjectAltNameWarning, now issued once\n per host. (Issue #671)\n\n* Fix ``httplib.IncompleteRead`` not getting converted to\n ``ProtocolError`` when using ``HTTPResponse.stream()``\n (Issue #674)\n\n1.10.4 (2015-05-03)\n-------------------\n\n* Migrate tests to Tornado 4. (Issue #594)\n\n* Append default warning configuration rather than overwrite.\n (Issue #603)\n\n* Fix streaming decoding regression. (Issue #595)\n\n* Fix chunked requests losing state across keep-alive connections.\n (Issue #599)\n\n* Fix hanging when chunked HEAD response has no body. (Issue #605)\n\n\n1.10.3 (2015-04-21)\n-------------------\n\n* Emit ``InsecurePlatformWarning`` when SSLContext object is missing.\n (Issue #558)\n\n* Fix regression of duplicate header keys being discarded.\n (Issue #563)\n\n* ``Response.stream()`` returns a generator for chunked responses.\n (Issue #560)\n\n* Set upper-bound timeout when waiting for a socket in PyOpenSSL.\n (Issue #585)\n\n* Work on platforms without `ssl` module for plain HTTP requests.\n (Issue #587)\n\n* Stop relying on the stdlib's default cipher list. (Issue #588)\n\n\n1.10.2 (2015-02-25)\n-------------------\n\n* Fix file descriptor leakage on retries. (Issue #548)\n\n* Removed RC4 from default cipher list. (Issue #551)\n\n* Header performance improvements. (Issue #544)\n\n* Fix PoolManager not obeying redirect retry settings. (Issue #553)\n\n\n1.10.1 (2015-02-10)\n-------------------\n\n* Pools can be used as context managers. (Issue #545)\n\n* Don't re-use connections which experienced an SSLError. (Issue #529)\n\n* Don't fail when gzip decoding an empty stream. (Issue #535)\n\n* Add sha256 support for fingerprint verification. (Issue #540)\n\n* Fixed handling of header values containing commas. (Issue #533)\n\n\n1.10 (2014-12-14)\n-----------------\n\n* Disabled SSLv3. (Issue #473)\n\n* Add ``Url.url`` property to return the composed url string. (Issue #394)\n\n* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412)\n\n* ``MaxRetryError.reason`` will always be an exception, not string.\n (Issue #481)\n\n* Fixed SSL-related timeouts not being detected as timeouts. 
(Issue #492)\n\n* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473)\n\n* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request.\n (Issue #496)\n\n* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``.\n (Issue #499)\n\n* Close and discard sockets which experienced SSL-related errors.\n (Issue #501)\n\n* Handle ``body`` param in ``.request(...)``. (Issue #513)\n\n* Respect timeout with HTTPS proxy. (Issue #505)\n\n* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520)\n\n\n1.9.1 (2014-09-13)\n------------------\n\n* Apply socket arguments before binding. (Issue #427)\n\n* More careful checks if fp-like object is closed. (Issue #435)\n\n* Fixed packaging issues of some development-related files not\n getting included. (Issue #440)\n\n* Allow performing *only* fingerprint verification. (Issue #444)\n\n* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445)\n\n* Fixed PyOpenSSL compatibility with PyPy. (Issue #450)\n\n* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3.\n (Issue #443)\n\n\n\n1.9 (2014-07-04)\n----------------\n\n* Shuffled around development-related files. If you're maintaining a distro\n package of urllib3, you may need to tweak things. (Issue #415)\n\n* Unverified HTTPS requests will trigger a warning on the first request. See\n our new `security documentation\n `_ for details.\n (Issue #426)\n\n* New retry logic and ``urllib3.util.retry.Retry`` configuration object.\n (Issue #326)\n\n* All raised exceptions should now wrapped in a\n ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326)\n\n* All errors during a retry-enabled request should be wrapped in\n ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions\n which were previously exempt. Underlying error is accessible from the\n ``.reason`` property. (Issue #326)\n\n* ``urllib3.exceptions.ConnectionError`` renamed to\n ``urllib3.exceptions.ProtocolError``. (Issue #326)\n\n* Errors during response read (such as IncompleteRead) are now wrapped in\n ``urllib3.exceptions.ProtocolError``. (Issue #418)\n\n* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``.\n (Issue #417)\n\n* Catch read timeouts over SSL connections as\n ``urllib3.exceptions.ReadTimeoutError``. (Issue #419)\n\n* Apply socket arguments before connecting. (Issue #427)\n\n\n1.8.3 (2014-06-23)\n------------------\n\n* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385)\n\n* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393)\n\n* Wrap ``socket.timeout`` exception with\n ``urllib3.exceptions.ReadTimeoutError``. (Issue #399)\n\n* Fixed proxy-related bug where connections were being reused incorrectly.\n (Issues #366, #369)\n\n* Added ``socket_options`` keyword parameter which allows to define\n ``setsockopt`` configuration of new sockets. (Issue #397)\n\n* Removed ``HTTPConnection.tcp_nodelay`` in favor of\n ``HTTPConnection.default_socket_options``. (Issue #397)\n\n* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411)\n\n\n1.8.2 (2014-04-17)\n------------------\n\n* Fix ``urllib3.util`` not being included in the package.\n\n\n1.8.1 (2014-04-17)\n------------------\n\n* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356)\n\n* Don't install ``dummyserver`` into ``site-packages`` as it's only needed\n for the test suite. (Issue #362)\n\n* Added support for specifying ``source_address``. 
(Issue #352)\n\n\n1.8 (2014-03-04)\n----------------\n\n* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in\n username, and blank ports like 'hostname:').\n\n* New ``urllib3.connection`` module which contains all the HTTPConnection\n objects.\n\n* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor\n signature to a more sensible order. [Backwards incompatible]\n (Issues #252, #262, #263)\n\n* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274)\n\n* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which\n returns the number of bytes read so far. (Issue #277)\n\n* Support for platforms without threading. (Issue #289)\n\n* Expand default-port comparison in ``HTTPConnectionPool.is_same_host``\n to allow a pool with no specified port to be considered equal to to an\n HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305)\n\n* Improved default SSL/TLS settings to avoid vulnerabilities.\n (Issue #309)\n\n* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors.\n (Issue #310)\n\n* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests\n will send the entire HTTP request ~200 milliseconds faster; however, some of\n the resulting TCP packets will be smaller. (Issue #254)\n\n* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl``\n from the default 64 to 1024 in a single certificate. (Issue #318)\n\n* Headers are now passed and stored as a custom\n ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``.\n (Issue #329, #333)\n\n* Headers no longer lose their case on Python 3. (Issue #236)\n\n* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA\n certificates on inject. (Issue #332)\n\n* Requests with ``retries=False`` will immediately raise any exceptions without\n wrapping them in ``MaxRetryError``. (Issue #348)\n\n* Fixed open socket leak with SSL-related failures. (Issue #344, #348)\n\n\n1.7.1 (2013-09-25)\n------------------\n\n* Added granular timeout support with new ``urllib3.util.Timeout`` class.\n (Issue #231)\n\n* Fixed Python 3.4 support. (Issue #238)\n\n\n1.7 (2013-08-14)\n----------------\n\n* More exceptions are now pickle-able, with tests. (Issue #174)\n\n* Fixed redirecting with relative URLs in Location header. (Issue #178)\n\n* Support for relative urls in ``Location: ...`` header. (Issue #179)\n\n* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus\n file-like functionality. (Issue #187)\n\n* Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will\n skip hostname verification for SSL connections. (Issue #194)\n\n* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a\n generator wrapped around ``.read(...)``. (Issue #198)\n\n* IPv6 url parsing enforces brackets around the hostname. (Issue #199)\n\n* Fixed thread race condition in\n ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204)\n\n* ``ProxyManager`` requests now include non-default port in ``Host: ...``\n header. (Issue #217)\n\n* Added HTTPS proxy support in ``ProxyManager``. (Issue #170 #139)\n\n* New ``RequestField`` object can be passed to the ``fields=...`` param which\n can specify headers. (Issue #220)\n\n* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails.\n (Issue #221)\n\n* Use international headers when posting file names. (Issue #119)\n\n* Improved IPv6 support. 
(Issue #203)\n\n\n1.6 (2013-04-25)\n----------------\n\n* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156)\n\n* ``ProxyManager`` automatically adds ``Host: ...`` header if not given.\n\n* Improved SSL-related code. ``cert_req`` now optionally takes a string like\n \"REQUIRED\" or \"NONE\". Same with ``ssl_version`` takes strings like \"SSLv23\"\n The string values reflect the suffix of the respective constant variable.\n (Issue #130)\n\n* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly\n closed proxy connections and larger read buffers. (Issue #135)\n\n* Ensure the connection is closed if no data is received, fixes connection leak\n on some platforms. (Issue #133)\n\n* Added SNI support for SSL/TLS connections on Py32+. (Issue #89)\n\n* Tests fixed to be compatible with Py26 again. (Issue #125)\n\n* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant\n to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109)\n\n* Allow an explicit content type to be specified when encoding file fields.\n (Issue #126)\n\n* Exceptions are now pickleable, with tests. (Issue #101)\n\n* Fixed default headers not getting passed in some cases. (Issue #99)\n\n* Treat \"content-encoding\" header value as case-insensitive, per RFC 2616\n Section 3.5. (Issue #110)\n\n* \"Connection Refused\" SocketErrors will get retried rather than raised.\n (Issue #92)\n\n* Updated vendored ``six``, no longer overrides the global ``six`` module\n namespace. (Issue #113)\n\n* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding\n the exception that prompted the final retry. If ``reason is None`` then it\n was due to a redirect. (Issue #92, #114)\n\n* Fixed ``PoolManager.urlopen()`` from not redirecting more than once.\n (Issue #149)\n\n* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters\n that are not files. (Issue #111)\n\n* Pass `strict` param down to ``httplib.HTTPConnection``. (Issue #122)\n\n* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or\n against an arbitrary hostname (when connecting by IP or for misconfigured\n servers). (Issue #140)\n\n* Streaming decompression support. (Issue #159)\n\n\n1.5 (2012-08-02)\n----------------\n\n* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug\n logging in urllib3.\n\n* Native full URL parsing (including auth, path, query, fragment) available in\n ``urllib3.util.parse_url(url)``.\n\n* Built-in redirect will switch method to 'GET' if status code is 303.\n (Issue #11)\n\n* ``urllib3.PoolManager`` strips the scheme and host before sending the request\n uri. (Issue #8)\n\n* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding,\n based on the Content-Type header, fails.\n\n* Fixed bug with pool depletion and leaking connections (Issue #76). Added\n explicit connection closing on pool eviction. Added\n ``urllib3.PoolManager.clear()``.\n\n* 99% -> 100% unit test coverage.\n\n\n1.4 (2012-06-16)\n----------------\n\n* Minor AppEngine-related fixes.\n\n* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``.\n\n* Improved url parsing. (Issue #73)\n\n* IPv6 url support. (Issue #72)\n\n\n1.3 (2012-03-25)\n----------------\n\n* Removed pre-1.0 deprecated API.\n\n* Refactored helpers into a ``urllib3.util`` submodule.\n\n* Fixed multipart encoding to support list-of-tuples for keys with multiple\n values. 
(Issue #48)\n\n* Fixed multiple Set-Cookie headers in response not getting merged properly in\n Python 3. (Issue #53)\n\n* AppEngine support with Py27. (Issue #61)\n\n* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs\n bytes.\n\n\n1.2.2 (2012-02-06)\n------------------\n\n* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47)\n\n\n1.2.1 (2012-02-05)\n------------------\n\n* Fixed another bug related to when ``ssl`` module is not available. (Issue #41)\n\n* Location parsing errors now raise ``urllib3.exceptions.LocationParseError``\n which inherits from ``ValueError``.\n\n\n1.2 (2012-01-29)\n----------------\n\n* Added Python 3 support (tested on 3.2.2)\n\n* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2)\n\n* Use ``select.poll`` instead of ``select.select`` for platforms that support\n it.\n\n* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive\n connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``.\n\n* Fixed ``ImportError`` during install when ``ssl`` module is not available.\n (Issue #41)\n\n* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not\n completing properly. (Issue #28, uncovered by Issue #10 in v1.1)\n\n* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` +\n ``eventlet``. Removed extraneous unsupported dummyserver testing backends.\n Added socket-level tests.\n\n* More tests. Achievement Unlocked: 99% Coverage.\n\n\n1.1 (2012-01-07)\n----------------\n\n* Refactored ``dummyserver`` to its own root namespace module (used for\n testing).\n\n* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in\n Py32's ``ssl_match_hostname``. (Issue #25)\n\n* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10)\n\n* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue\n #27)\n\n* Fixed timeout-related bugs. (Issues #17, #23)\n\n\n1.0.2 (2011-11-04)\n------------------\n\n* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if\n you're using the object manually. (Thanks pyos)\n\n* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by\n wrapping the access log in a mutex. (Thanks @christer)\n\n* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and\n ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code.\n\n\n1.0.1 (2011-10-10)\n------------------\n\n* Fixed a bug where the same connection would get returned into the pool twice,\n causing extraneous \"HttpConnectionPool is full\" log warnings.\n\n\n1.0 (2011-10-08)\n----------------\n\n* Added ``PoolManager`` with LRU expiration of connections (tested and\n documented).\n* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works\n with HTTPS proxies).\n* Added optional partial-read support for responses when\n ``preload_content=False``. You can now make requests and just read the headers\n without loading the content.\n* Made response decoding optional (default on, same as before).\n* Added optional explicit boundary string for ``encode_multipart_formdata``.\n* Convenience request methods are now inherited from ``RequestMethods``. 
Old\n helpers like ``get_url`` and ``post_url`` should be abandoned in favour of\n the new ``request(method, url, ...)``.\n* Refactored code to be even more decoupled, reusable, and extendable.\n* License header added to ``.py`` files.\n* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code\n and docs in ``docs/`` and on https://urllib3.readthedocs.io/.\n* Embettered all the things!\n* Started writing this file.\n\n\n0.4.1 (2011-07-17)\n------------------\n\n* Minor bug fixes, code cleanup.\n\n\n0.4 (2011-03-01)\n----------------\n\n* Better unicode support.\n* Added ``VerifiedHTTPSConnection``.\n* Added ``NTLMConnectionPool`` in contrib.\n* Minor improvements.\n\n\n0.3.1 (2010-07-13)\n------------------\n\n* Added ``assert_host_name`` optional parameter. Now compatible with proxies.\n\n\n0.3 (2009-12-10)\n----------------\n\n* Added HTTPS support.\n* Minor bug fixes.\n* Refactored, broken backwards compatibility with 0.2.\n* API to be treated as stable from this version forward.\n\n\n0.2 (2008-11-17)\n----------------\n\n* Added unit tests.\n* Bug fixes.\n\n\n0.1 (2008-11-16)\n----------------\n\n* First release.", + "release_date": "2024-08-29T15:43:08", + "parties": [ + { + "type": "person", + "role": "author", + "name": "Andrey Petrov", + "email": "andrey.petrov@shazow.net", + "url": null + } + ], + "keywords": [ + "urllib httplib threadsafe filepost http https ssl pooling", + "Environment :: Web Environment", + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Internet :: WWW/HTTP", + "Topic :: Software Development :: Libraries" + ], + "homepage_url": "https://urllib3.readthedocs.io/", + "download_url": "https://files.pythonhosted.org/packages/33/cf/8435d5a7159e2a9c83a95896ed596f68cf798005fe107cc655b5c5c14704/urllib3-1.26.20-py2.py3-none-any.whl", + "size": 144225, + "sha1": null, + "md5": "7e0b513a3a35c882aee2bc2e7e6e9848", + "sha256": "0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", + "sha512": null, + "bug_tracking_url": null, + "code_view_url": "https://github.com/urllib3/urllib3", + "vcs_url": null, + "copyright": null, + "license_expression": null, + "declared_license": { + "license": "MIT", + "classifiers": [ + "License :: OSI Approved :: MIT License" + ] + }, + "notice_text": null, + "source_packages": [], + "file_references": [], + "extra_data": {}, + "dependencies": [], + "repository_homepage_url": null, + "repository_download_url": null, + "api_data_url": "https://pypi.org/pypi/urllib3/1.26.20/json", + "datasource_id": null, + "purl": "pkg:pypi/urllib3@1.26.20" + } + ], + "resolved_dependencies_graph": [ + { + "package": "pkg:pypi/addict@2.4.0", + "dependencies": [] + }, + { + "package": "pkg:pypi/certifi@2025.8.3", + "dependencies": [] + }, + { + "package": "pkg:pypi/chardet@4.0.0", + "dependencies": [] + }, + { + "package": "pkg:pypi/idna@2.10", + 
"dependencies": [] + }, + { + "package": "pkg:pypi/requests@2.25.1", + "dependencies": [ + "pkg:pypi/certifi@2025.8.3", + "pkg:pypi/chardet@4.0.0", + "pkg:pypi/idna@2.10", + "pkg:pypi/urllib3@1.26.20" + ] + }, + { + "package": "pkg:pypi/urllib3@1.26.20", + "dependencies": [] + } + ] +} \ No newline at end of file diff --git a/tests/data/resolved_deps/autobahn-310-expected.json b/tests/data/resolved_deps/autobahn-310-expected.json index b1182fb2..87566419 100644 --- a/tests/data/resolved_deps/autobahn-310-expected.json +++ b/tests/data/resolved_deps/autobahn-310-expected.json @@ -10,15 +10,15 @@ ] }, { - "package": "pkg:pypi/cffi@1.17.1", + "package": "pkg:pypi/cffi@2.0.0", "dependencies": [ - "pkg:pypi/pycparser@2.22" + "pkg:pypi/pycparser@2.23" ] }, { "package": "pkg:pypi/cryptography@43.0.3", "dependencies": [ - "pkg:pypi/cffi@1.17.1" + "pkg:pypi/cffi@2.0.0" ] }, { @@ -32,7 +32,7 @@ "dependencies": [] }, { - "package": "pkg:pypi/pycparser@2.22", + "package": "pkg:pypi/pycparser@2.23", "dependencies": [] }, { @@ -46,11 +46,11 @@ ], [ "pkg:pypi/autobahn@22.3.2", - "pkg:pypi/cffi@1.17.1", + "pkg:pypi/cffi@2.0.0", "pkg:pypi/cryptography@43.0.3", "pkg:pypi/hyperlink@21.0.0", "pkg:pypi/idna@3.10", - "pkg:pypi/pycparser@2.22", + "pkg:pypi/pycparser@2.23", "pkg:pypi/setuptools@80.9.0", "pkg:pypi/txaio@23.6.1" ] diff --git a/tests/data/resolved_deps/flask-310-expected.json b/tests/data/resolved_deps/flask-310-expected.json index cc88a552..25b9b00e 100644 --- a/tests/data/resolved_deps/flask-310-expected.json +++ b/tests/data/resolved_deps/flask-310-expected.json @@ -1,13 +1,13 @@ [ [ { - "package": "pkg:pypi/click@8.2.1", + "package": "pkg:pypi/click@8.2.2", "dependencies": [] }, { "package": "pkg:pypi/flask@2.1.2", "dependencies": [ - "pkg:pypi/click@8.2.1", + "pkg:pypi/click@8.2.2", "pkg:pypi/itsdangerous@2.2.0", "pkg:pypi/jinja2@3.1.6", "pkg:pypi/werkzeug@3.1.3" @@ -35,7 +35,7 @@ } ], [ - "pkg:pypi/click@8.2.1", + "pkg:pypi/click@8.2.2", "pkg:pypi/flask@2.1.2", "pkg:pypi/itsdangerous@2.2.0", "pkg:pypi/jinja2@3.1.6", diff --git a/tests/data/resolved_deps/flask-310-win-expected.json b/tests/data/resolved_deps/flask-310-win-expected.json index a8f45499..e054ac42 100644 --- a/tests/data/resolved_deps/flask-310-win-expected.json +++ b/tests/data/resolved_deps/flask-310-win-expected.json @@ -1,7 +1,7 @@ [ [ { - "package": "pkg:pypi/click@8.2.1", + "package": "pkg:pypi/click@8.2.2", "dependencies": [ "pkg:pypi/colorama@0.4.6" ] @@ -13,7 +13,7 @@ { "package": "pkg:pypi/flask@2.1.2", "dependencies": [ - "pkg:pypi/click@8.2.1", + "pkg:pypi/click@8.2.2", "pkg:pypi/itsdangerous@2.2.0", "pkg:pypi/jinja2@3.1.6", "pkg:pypi/werkzeug@3.1.3" @@ -41,7 +41,7 @@ } ], [ - "pkg:pypi/click@8.2.1", + "pkg:pypi/click@8.2.2", "pkg:pypi/colorama@0.4.6", "pkg:pypi/flask@2.1.2", "pkg:pypi/itsdangerous@2.2.0", diff --git a/tests/data/test-api-expected.json b/tests/data/test-api-expected.json index 9236c0c8..48e56477 100644 --- a/tests/data/test-api-expected.json +++ b/tests/data/test-api-expected.json @@ -5,12 +5,12 @@ "type": "pypi", "namespace": null, "name": "click", - "version": "8.2.1", + "version": "8.2.2", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Composable command line interface toolkit\n# $ click_\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". 
It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", - "release_date": "2025-05-20T23:19:47", + "description": "Composable command line interface toolkit\n
\"\"
\n\n# Click\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", + "release_date": "2025-08-02T02:23:39", "parties": [ { "type": "person", @@ -28,11 +28,11 @@ "Typing :: Typed" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", - "size": 102215, + "download_url": "https://files.pythonhosted.org/packages/ec/85/e7297e34133ae1cfde3bffd30c24e1ef055248251baa877834e048687a28/click-8.2.2-py3-none-any.whl", + "size": 103900, "sha1": null, - "md5": "aeead16d8bed93caa7107ac87b1e5ec8", - "sha256": "61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", + "md5": "7d180e1baded1a50d5ad31b43a965888", + "sha256": "52e1e9f5d3db8c85aa76968c7c67ed41ddbacb167f43201511c8fd61eb5ba2ca", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/pallets/click/", @@ -47,9 +47,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/click/8.2.1/json", + "api_data_url": "https://pypi.org/pypi/click/8.2.2/json", "datasource_id": null, - "purl": "pkg:pypi/click@8.2.1" + "purl": "pkg:pypi/click@8.2.2" }, { "type": "pypi", @@ -350,13 +350,13 @@ ], "resolution": [ { - "package": "pkg:pypi/click@8.2.1", + "package": "pkg:pypi/click@8.2.2", "dependencies": [] }, { "package": "pkg:pypi/flask@2.1.2", "dependencies": [ - "pkg:pypi/click@8.2.1", + "pkg:pypi/click@8.2.2", "pkg:pypi/itsdangerous@2.2.0", "pkg:pypi/jinja2@3.1.6", "pkg:pypi/werkzeug@3.1.3" diff --git a/tests/data/test-api-pdt-expected.json b/tests/data/test-api-pdt-expected.json index 12f53983..102a4a1d 100644 --- a/tests/data/test-api-pdt-expected.json +++ b/tests/data/test-api-pdt-expected.json @@ -5,12 +5,12 @@ "type": "pypi", "namespace": null, "name": "click", - "version": "8.2.1", + "version": "8.2.2", "qualifiers": {}, 
"subpath": null, "primary_language": "Python", - "description": "Composable command line interface toolkit\n# $ click_\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", - "release_date": "2025-05-20T23:19:47", + "description": "Composable command line interface toolkit\n
\"\"
\n\n# Click\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", + "release_date": "2025-08-02T02:23:39", "parties": [ { "type": "person", @@ -28,11 +28,11 @@ "Typing :: Typed" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", - "size": 102215, + "download_url": "https://files.pythonhosted.org/packages/ec/85/e7297e34133ae1cfde3bffd30c24e1ef055248251baa877834e048687a28/click-8.2.2-py3-none-any.whl", + "size": 103900, "sha1": null, - "md5": "aeead16d8bed93caa7107ac87b1e5ec8", - "sha256": "61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", + "md5": "7d180e1baded1a50d5ad31b43a965888", + "sha256": "52e1e9f5d3db8c85aa76968c7c67ed41ddbacb167f43201511c8fd61eb5ba2ca", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/pallets/click/", @@ -47,9 +47,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/click/8.2.1/json", + "api_data_url": "https://pypi.org/pypi/click/8.2.2/json", "datasource_id": null, - "purl": "pkg:pypi/click@8.2.1" + "purl": "pkg:pypi/click@8.2.2" }, { "type": "pypi", @@ -357,7 +357,7 @@ { "key": "click", "package_name": "click", - "installed_version": "8.2.1", + "installed_version": "8.2.2", "dependencies": [] }, { diff --git a/tests/data/test-api-with-prefer-source.json b/tests/data/test-api-with-prefer-source.json index 80b32323..8653fa3a 100644 --- a/tests/data/test-api-with-prefer-source.json +++ b/tests/data/test-api-with-prefer-source.json @@ -5,12 +5,12 @@ "type": "pypi", "namespace": null, "name": "click", - "version": "8.2.1", + "version": "8.2.2", "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Composable command line interface toolkit\n# $ click_\n\nClick is a Python package for 
creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", - "release_date": "2025-05-20T23:19:49", + "description": "Composable command line interface toolkit\n
\"\"
\n\n# Click\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", + "release_date": "2025-08-02T02:23:41", "parties": [ { "type": "person", @@ -28,11 +28,11 @@ "Typing :: Typed" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", - "size": 286342, + "download_url": "https://files.pythonhosted.org/packages/e9/87/105111999772ec9730e3d4d910c723ea9763ece2ec441533a5cea1e87e3c/click-8.2.2.tar.gz", + "size": 263977, "sha1": null, - "md5": "438cfd4974584ae4f960ffeab32e8991", - "sha256": "27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", + "md5": "2b12ffb230a22fd5aabe301ade045b4a", + "sha256": "068616e6ef9705a07b6db727cb9c248f4eb9dae437a30239f56fa94b18b852ef", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/pallets/click/", @@ -47,9 +47,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/click/8.2.1/json", + "api_data_url": "https://pypi.org/pypi/click/8.2.2/json", "datasource_id": null, - "purl": "pkg:pypi/click@8.2.1" + "purl": "pkg:pypi/click@8.2.2" }, { "type": "pypi", @@ -350,13 +350,13 @@ ], "resolution": [ { - "package": "pkg:pypi/click@8.2.1", + "package": "pkg:pypi/click@8.2.2", "dependencies": [] }, { "package": "pkg:pypi/flask@2.1.2", "dependencies": [ - "pkg:pypi/click@8.2.1", + "pkg:pypi/click@8.2.2", "pkg:pypi/itsdangerous@2.2.0", "pkg:pypi/jinja2@3.1.6", "pkg:pypi/werkzeug@3.1.3" diff --git a/tests/data/test-api-with-python-311.json b/tests/data/test-api-with-python-311.json index 80b32323..8653fa3a 100644 --- a/tests/data/test-api-with-python-311.json +++ b/tests/data/test-api-with-python-311.json @@ -5,12 +5,12 @@ "type": "pypi", "namespace": null, "name": "click", - "version": "8.2.1", + "version": "8.2.2", "qualifiers": {}, "subpath": 
null, "primary_language": "Python", - "description": "Composable command line interface toolkit\n# $ click_\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", - "release_date": "2025-05-20T23:19:49", + "description": "Composable command line interface toolkit\n
\"\"
\n\n# Click\n\nClick is a Python package for creating beautiful command line interfaces\nin a composable way with as little code as necessary. It's the \"Command\nLine Interface Creation Kit\". It's highly configurable but comes with\nsensible defaults out of the box.\n\nIt aims to make the process of writing command line tools quick and fun\nwhile also preventing any frustration caused by the inability to\nimplement an intended CLI API.\n\nClick in three points:\n\n- Arbitrary nesting of commands\n- Automatic help page generation\n- Supports lazy loading of subcommands at runtime\n\n\n## A Simple Example\n\n```python\nimport click\n\n@click.command()\n@click.option(\"--count\", default=1, help=\"Number of greetings.\")\n@click.option(\"--name\", prompt=\"Your name\", help=\"The person to greet.\")\ndef hello(count, name):\n \"\"\"Simple program that greets NAME for a total of COUNT times.\"\"\"\n for _ in range(count):\n click.echo(f\"Hello, {name}!\")\n\nif __name__ == '__main__':\n hello()\n```\n\n```\n$ python hello.py --count=3\nYour name: Click\nHello, Click!\nHello, Click!\nHello, Click!\n```\n\n\n## Donate\n\nThe Pallets organization develops and supports Click and other popular\npackages. In order to grow the community of contributors and users, and\nallow the maintainers to devote more time to the projects, [please\ndonate today][].\n\n[please donate today]: https://palletsprojects.com/donate\n\n## Contributing\n\nSee our [detailed contributing documentation][contrib] for many ways to\ncontribute, including reporting issues, requesting features, asking or answering\nquestions, and making PRs.\n\n[contrib]: https://palletsprojects.com/contributing/", + "release_date": "2025-08-02T02:23:41", "parties": [ { "type": "person", @@ -28,11 +28,11 @@ "Typing :: Typed" ], "homepage_url": null, - "download_url": "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", - "size": 286342, + "download_url": "https://files.pythonhosted.org/packages/e9/87/105111999772ec9730e3d4d910c723ea9763ece2ec441533a5cea1e87e3c/click-8.2.2.tar.gz", + "size": 263977, "sha1": null, - "md5": "438cfd4974584ae4f960ffeab32e8991", - "sha256": "27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", + "md5": "2b12ffb230a22fd5aabe301ade045b4a", + "sha256": "068616e6ef9705a07b6db727cb9c248f4eb9dae437a30239f56fa94b18b852ef", "sha512": null, "bug_tracking_url": null, "code_view_url": "https://github.com/pallets/click/", @@ -47,9 +47,9 @@ "dependencies": [], "repository_homepage_url": null, "repository_download_url": null, - "api_data_url": "https://pypi.org/pypi/click/8.2.1/json", + "api_data_url": "https://pypi.org/pypi/click/8.2.2/json", "datasource_id": null, - "purl": "pkg:pypi/click@8.2.1" + "purl": "pkg:pypi/click@8.2.2" }, { "type": "pypi", @@ -350,13 +350,13 @@ ], "resolution": [ { - "package": "pkg:pypi/click@8.2.1", + "package": "pkg:pypi/click@8.2.2", "dependencies": [] }, { "package": "pkg:pypi/flask@2.1.2", "dependencies": [ - "pkg:pypi/click@8.2.1", + "pkg:pypi/click@8.2.2", "pkg:pypi/itsdangerous@2.2.0", "pkg:pypi/jinja2@3.1.6", "pkg:pypi/werkzeug@3.1.3" diff --git a/tests/data/test-api-with-requirement-file.json b/tests/data/test-api-with-requirement-file.json index 1c5c08da..dece42d3 100644 --- a/tests/data/test-api-with-requirement-file.json +++ b/tests/data/test-api-with-requirement-file.json @@ -3727,12 +3727,12 @@ "type": "pypi", "namespace": null, "name": "pip", - "version": "25.1.1", + "version": "25.2", 
"qualifiers": {}, "subpath": null, "primary_language": "Python", "description": "The PyPA recommended tool for installing Python packages.\npip - The Python Package Installer\n==================================\n\n.. |pypi-version| image:: https://img.shields.io/pypi/v/pip.svg\n :target: https://pypi.org/project/pip/\n :alt: PyPI\n\n.. |python-versions| image:: https://img.shields.io/pypi/pyversions/pip\n :target: https://pypi.org/project/pip\n :alt: PyPI - Python Version\n\n.. |docs-badge| image:: https://readthedocs.org/projects/pip/badge/?version=latest\n :target: https://pip.pypa.io/en/latest\n :alt: Documentation\n\n|pypi-version| |python-versions| |docs-badge|\n\npip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.\n\nPlease take a look at our documentation for how to install and use pip:\n\n* `Installation`_\n* `Usage`_\n\nWe release updates regularly, with a new version every 3 months. Find more details in our documentation:\n\n* `Release notes`_\n* `Release process`_\n\nIf you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:\n\n* `Issue tracking`_\n* `Discourse channel`_\n* `User IRC`_\n\nIf you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms:\n\n* `GitHub page`_\n* `Development documentation`_\n* `Development IRC`_\n\nCode of Conduct\n---------------\n\nEveryone interacting in the pip project's codebases, issue trackers, chat\nrooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.\n\n.. _package installer: https://packaging.python.org/guides/tool-recommendations/\n.. _Python Package Index: https://pypi.org\n.. _Installation: https://pip.pypa.io/en/stable/installation/\n.. _Usage: https://pip.pypa.io/en/stable/\n.. _Release notes: https://pip.pypa.io/en/stable/news.html\n.. _Release process: https://pip.pypa.io/en/latest/development/release-process/\n.. _GitHub page: https://github.com/pypa/pip\n.. _Development documentation: https://pip.pypa.io/en/latest/development\n.. _Issue tracking: https://github.com/pypa/pip/issues\n.. _Discourse channel: https://discuss.python.org/c/packaging\n.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa\n.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev\n.. 
_PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md",
-      "release_date": "2025-05-02T15:13:59",
+      "release_date": "2025-07-30T21:50:13",
       "parties": [
         {
           "type": "person",
@@ -3752,29 +3752,25 @@
         "Programming Language :: Python :: 3.11",
         "Programming Language :: Python :: 3.12",
         "Programming Language :: Python :: 3.13",
+        "Programming Language :: Python :: 3.14",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: Implementation :: CPython",
         "Programming Language :: Python :: Implementation :: PyPy",
         "Topic :: Software Development :: Build Tools"
       ],
       "homepage_url": null,
-      "download_url": "https://files.pythonhosted.org/packages/29/a2/d40fb2460e883eca5199c62cfc2463fd261f760556ae6290f88488c362c0/pip-25.1.1-py3-none-any.whl",
-      "size": 1825227,
+      "download_url": "https://files.pythonhosted.org/packages/b7/3f/945ef7ab14dc4f9d7f40288d2df998d1837ee0888ec3659c813487572faa/pip-25.2-py3-none-any.whl",
+      "size": 1752557,
       "sha1": null,
-      "md5": "6a01d861bf88ee075c6942419a04839d",
-      "sha256": "2913a38a2abf4ea6b64ab507bd9e967f3b53dc1ede74b01b0931e1ce548751af",
+      "md5": "52af72c263169cc46cc32bed310a5eab",
+      "sha256": "6d67a2b4e7f14d8b31b8b52648866fa717f45a1eb70e83002f4331d07e953717",
       "sha512": null,
       "bug_tracking_url": null,
       "code_view_url": "https://github.com/pypa/pip",
       "vcs_url": null,
       "copyright": null,
-      "license_expression": null,
-      "declared_license": {
-        "license": "MIT",
-        "classifiers": [
-          "License :: OSI Approved :: MIT License"
-        ]
-      },
+      "license_expression": "MIT",
+      "declared_license": {},
       "notice_text": null,
       "source_packages": [],
       "file_references": [],
@@ -3782,9 +3778,9 @@
       "dependencies": [],
       "repository_homepage_url": null,
       "repository_download_url": null,
-      "api_data_url": "https://pypi.org/pypi/pip/25.1.1/json",
+      "api_data_url": "https://pypi.org/pypi/pip/25.2/json",
       "datasource_id": null,
-      "purl": "pkg:pypi/pip@25.1.1"
+      "purl": "pkg:pypi/pip@25.2"
     },
     {
       "type": "pypi",
@@ -6162,13 +6158,13 @@
       ]
     },
     {
-      "package": "pkg:pypi/pip@25.1.1",
+      "package": "pkg:pypi/pip@25.2",
       "dependencies": []
     },
     {
       "package": "pkg:pypi/pipdeptree@2.2.1",
       "dependencies": [
-        "pkg:pypi/pip@25.1.1"
+        "pkg:pypi/pip@25.2"
      ]
     },
     {
diff --git a/tests/test_cli.py b/tests/test_cli.py
index a360801e..149b8947 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -414,6 +414,17 @@ def test_cli_with_pinned_requirements_file():
     )
 
 
+@pytest.mark.online
+def test_cli_with_hash_requirements():
+    requirements_file = test_env.get_test_loc("hash-requirements.txt")
+    expected_file = test_env.get_test_loc("hash-requirements.txt-expected.json", must_exist=False)
+    check_requirements_resolution(
+        requirements_file=requirements_file,
+        expected_file=expected_file,
+        regen=REGEN_TEST_FIXTURES,
+    )
+
+
 @pytest.mark.online
 def test_cli_with_setup_py_failure():
     setup_py_file = setup_test_env.get_test_loc("simple-setup.py")
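Note on the new `test_cli_with_hash_requirements` test above: it resolves a requirements file whose pins carry `--hash` options, using a `tests/data/hash-requirements.txt` fixture and a `hash-requirements.txt-expected.json` file that may not exist yet (`must_exist=False`, regenerated when `regen=REGEN_TEST_FIXTURES` is enabled). The fixture itself is not part of this hunk, so the snippet below is only a hypothetical sketch of the kind of input such a test consumes; the package pins are borrowed from the resolution data above, and the sha256 digests are placeholders rather than real distribution hashes.

```python
# Hypothetical sketch: write a hash-pinned requirements file shaped like the
# tests/data/hash-requirements.txt fixture referenced by the new test.
# The digests are placeholders, NOT the real sha256 values published on PyPI.
from pathlib import Path

FAKE_DIGEST = "0" * 64  # placeholder sha256; a real fixture would use the actual digests

HASH_PINNED_REQUIREMENTS = (
    f"click==8.2.2 --hash=sha256:{FAKE_DIGEST}\n"
    f"flask==2.1.2 --hash=sha256:{FAKE_DIGEST}\n"
)

def write_fixture(path="hash-requirements.txt"):
    """Write the sketch requirements file and return its path."""
    out = Path(path)
    out.write_text(HASH_PINNED_REQUIREMENTS)
    return out

if __name__ == "__main__":
    print(write_fixture().read_text())
```

pip's hash-checking mode requires every entry in such a file to be pinned with `==` and to carry at least one `--hash` option, so the real fixture presumably follows that shape; the expected JSON can then be (re)generated by running the test with `REGEN_TEST_FIXTURES` enabled.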