Hello community,

here is the log from the commit of package python-azure-core for openSUSE:Factory checked in at 2024-04-09 16:48:06
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-azure-core (Old)
 and      /work/SRC/openSUSE:Factory/.python-azure-core.new.29460 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-azure-core"

Tue Apr  9 16:48:06 2024 rev:49 rq:1166374 version:1.30.1

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-azure-core/python-azure-core.changes      2024-04-07 22:06:40.660078286 +0200
+++ /work/SRC/openSUSE:Factory/.python-azure-core.new.29460/python-azure-core.changes   2024-04-09 16:53:39.766312889 +0200
@@ -1,0 +2,8 @@
+Tue Apr  9 10:45:21 UTC 2024 - John Paul Adrian Glaubitz <adrian.glaub...@suse.com>
+
+- New upstream release
+  + Version 1.30.1
+  + For detailed information about changes see the
+    CHANGELOG.md file provided with this package
+
+-------------------------------------------------------------------

Old:
----
  azure-core-1.30.0.tar.gz

New:
----
  azure-core-1.30.1.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-azure-core.spec ++++++
--- /var/tmp/diff_new_pack.yZgjP2/_old  2024-04-09 16:53:40.214329416 +0200
+++ /var/tmp/diff_new_pack.yZgjP2/_new  2024-04-09 16:53:40.218329564 +0200
@@ -18,7 +18,7 @@
 
 %{?sle15_python_module_pythons}
 Name:           python-azure-core
-Version:        1.30.0
+Version:        1.30.1
 Release:        0
 Summary:        Microsoft Azure Core Library for Python
 License:        MIT

++++++ azure-core-1.30.0.tar.gz -> azure-core-1.30.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/CHANGELOG.md new/azure-core-1.30.1/CHANGELOG.md
--- old/azure-core-1.30.0/CHANGELOG.md  2024-01-31 22:55:17.000000000 +0100
+++ new/azure-core-1.30.1/CHANGELOG.md  2024-02-29 20:44:39.000000000 +0100
@@ -1,5 +1,11 @@
 # Release History
 
+## 1.30.1 (2024-02-29)
+
+### Other Changes
+
+- Accept float for `retry_after` header.  #34203
+
 ## 1.30.0 (2024-02-01)
 
 ### Features Added
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/PKG-INFO new/azure-core-1.30.1/PKG-INFO
--- old/azure-core-1.30.0/PKG-INFO      2024-01-31 22:56:05.936185100 +0100
+++ new/azure-core-1.30.1/PKG-INFO      2024-02-29 20:45:29.062880000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: azure-core
-Version: 1.30.0
+Version: 1.30.1
 Summary: Microsoft Azure Core Library for Python
 Home-page: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/core/azure-core
 Author: Microsoft Corporation
@@ -286,6 +286,12 @@
 
 # Release History
 
+## 1.30.1 (2024-02-29)
+
+### Other Changes
+
+- Accept float for `retry_after` header.  #34203
+
 ## 1.30.0 (2024-02-01)
 
 ### Features Added
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/azure/core/_version.py new/azure-core-1.30.1/azure/core/_version.py
--- old/azure-core-1.30.0/azure/core/_version.py        2024-01-31 22:55:17.000000000 +0100
+++ new/azure-core-1.30.1/azure/core/_version.py        2024-02-29 20:44:39.000000000 +0100
@@ -9,4 +9,4 @@
 # regenerated.
 # --------------------------------------------------------------------------
 
-VERSION = "1.30.0"
+VERSION = "1.30.1"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/azure/core/pipeline/policies/_utils.py new/azure-core-1.30.1/azure/core/pipeline/policies/_utils.py
--- old/azure-core-1.30.0/azure/core/pipeline/policies/_utils.py        2024-01-31 22:55:17.000000000 +0100
+++ new/azure-core-1.30.1/azure/core/pipeline/policies/_utils.py        2024-02-29 20:44:39.000000000 +0100
@@ -66,7 +66,7 @@
     """
     delay: float  # Using the Mypy recommendation to use float for "int or float"
     try:
-        delay = int(retry_after)
+        delay = float(retry_after)
     except ValueError:
         # Not an integer? Try HTTP date
         retry_date = _parse_http_date(retry_after)
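
For context: with this one-line change, numeric Retry-After values with a fractional part, such as "0.9", are parsed directly instead of raising ValueError and falling through to the HTTP-date branch. A minimal standalone sketch of the resulting behavior (simplified; the real parse_retry_after uses azure-core's internal _parse_http_date helper, approximated here with the stdlib):

    from datetime import datetime, timezone
    from email.utils import parsedate_to_datetime

    def parse_retry_after(retry_after: str) -> float:
        try:
            delay = float(retry_after)  # was int(...) before 1.30.1
        except ValueError:
            # Not a number? Try HTTP date (stdlib parser used for illustration).
            retry_date = parsedate_to_datetime(retry_after)
            delay = (retry_date - datetime.now(timezone.utc)).total_seconds()
        return max(0, delay)

    assert parse_retry_after("0.9") == 0.9    # hit the ValueError path before 1.30.1
    assert parse_retry_after("100") == 100.0
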
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/azure_core.egg-info/PKG-INFO new/azure-core-1.30.1/azure_core.egg-info/PKG-INFO
--- old/azure-core-1.30.0/azure_core.egg-info/PKG-INFO  2024-01-31 22:56:05.000000000 +0100
+++ new/azure-core-1.30.1/azure_core.egg-info/PKG-INFO  2024-02-29 20:45:28.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: azure-core
-Version: 1.30.0
+Version: 1.30.1
 Summary: Microsoft Azure Core Library for Python
 Home-page: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/core/azure-core
 Author: Microsoft Corporation
@@ -286,6 +286,12 @@
 
 # Release History
 
+## 1.30.1 (2024-02-29)
+
+### Other Changes
+
+- Accept float for `retry_after` header.  #34203
+
 ## 1.30.0 (2024-02-01)
 
 ### Features Added
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/azure_core.egg-info/SOURCES.txt new/azure-core-1.30.1/azure_core.egg-info/SOURCES.txt
--- old/azure-core-1.30.0/azure_core.egg-info/SOURCES.txt       2024-01-31 22:56:05.000000000 +0100
+++ new/azure-core-1.30.1/azure_core.egg-info/SOURCES.txt       2024-02-29 20:45:28.000000000 +0100
@@ -162,6 +162,14 @@
 tests/async_tests/test_testserver_async.py
 tests/async_tests/test_tracing_decorator_async.py
 tests/async_tests/test_universal_http_async.py
+tests/perf_tests/__init__.py
+tests/perf_tests/_test_base.py
+tests/perf_tests/custom_iterator.py
+tests/perf_tests/download_binary.py
+tests/perf_tests/list_entities_json.py
+tests/perf_tests/query_entities_json.py
+tests/perf_tests/update_entity_json.py
+tests/perf_tests/upload_binary.py
 tests/testserver_tests/coretestserver/setup.py
 tests/testserver_tests/coretestserver/coretestserver/__init__.py
 tests/testserver_tests/coretestserver/coretestserver/test_routes/__init__.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/samples/example_shared_transport.py new/azure-core-1.30.1/samples/example_shared_transport.py
--- old/azure-core-1.30.0/samples/example_shared_transport.py   2024-01-31 22:55:17.000000000 +0100
+++ new/azure-core-1.30.1/samples/example_shared_transport.py   2024-02-29 20:44:39.000000000 +0100
@@ -23,7 +23,7 @@
 from azure.core.pipeline.transport import RequestsTransport
 from azure.storage.blob import BlobServiceClient
 
-connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING")
+connection_string = os.environ["AZURE_STORAGE_CONNECTION_STRING"]
 
 
 def shared_transport():
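
Side note on the sample fix above (mirrored in the async sample below): os.environ[...] makes a missing variable fail fast with a KeyError, while os.getenv defers the failure to wherever the None is first used. A sketch of the difference:

    import os

    # Returns None when the variable is unset; the error only surfaces later,
    # wherever the connection string is first consumed:
    maybe_conn = os.getenv("AZURE_STORAGE_CONNECTION_STRING")

    # Raises KeyError immediately at startup, which is what the updated
    # samples now rely on:
    conn = os.environ["AZURE_STORAGE_CONNECTION_STRING"]
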
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/samples/example_shared_transport_async.py new/azure-core-1.30.1/samples/example_shared_transport_async.py
--- old/azure-core-1.30.0/samples/example_shared_transport_async.py     2024-01-31 22:55:17.000000000 +0100
+++ new/azure-core-1.30.1/samples/example_shared_transport_async.py     2024-02-29 20:44:39.000000000 +0100
@@ -24,7 +24,7 @@
 from azure.core.pipeline.transport import AioHttpTransport
 from azure.storage.blob.aio import BlobServiceClient
 
-connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING")
+connection_string = os.environ["AZURE_STORAGE_CONNECTION_STRING"]
 
 
 async def shared_transport_async():
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/async_tests/test_retry_policy_async.py new/azure-core-1.30.1/tests/async_tests/test_retry_policy_async.py
--- old/azure-core-1.30.0/tests/async_tests/test_retry_policy_async.py  2024-01-31 22:55:17.000000000 +0100
+++ new/azure-core-1.30.1/tests/async_tests/test_retry_policy_async.py  2024-02-29 20:44:39.000000000 +0100
@@ -59,7 +59,7 @@
     assert backoff_time == 4
 
 
-@pytest.mark.parametrize("retry_after_input,http_request", product(["0", "800", "1000", "1200"], HTTP_REQUESTS))
+@pytest.mark.parametrize("retry_after_input,http_request", product(["0", "800", "1000", "1200", "0.9"], HTTP_REQUESTS))
 def test_retry_after(retry_after_input, http_request):
     retry_policy = AsyncRetryPolicy()
     request = http_request("GET", "http://localhost")
@@ -78,7 +78,7 @@
     assert retry_after == float(retry_after_input)
 
 
-@pytest.mark.parametrize("retry_after_input,http_request", product(["0", "800", "1000", "1200"], HTTP_REQUESTS))
+@pytest.mark.parametrize("retry_after_input,http_request", product(["0", "800", "1000", "1200", "0.9"], HTTP_REQUESTS))
 def test_x_ms_retry_after(retry_after_input, http_request):
     retry_policy = AsyncRetryPolicy()
     request = http_request("GET", "http://localhost")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/async_tests/test_streaming_async.py new/azure-core-1.30.1/tests/async_tests/test_streaming_async.py
--- old/azure-core-1.30.0/tests/async_tests/test_streaming_async.py     2024-01-31 22:55:17.000000000 +0100
+++ new/azure-core-1.30.1/tests/async_tests/test_streaming_async.py     2024-02-29 20:44:39.000000000 +0100
@@ -24,12 +24,14 @@
 #
 # --------------------------------------------------------------------------
 import os
+import zlib
 import pytest
 from azure.core import AsyncPipelineClient
 from azure.core.exceptions import DecodeError
 from utils import HTTP_REQUESTS
 
 
+@pytest.mark.live_test_only
 @pytest.mark.asyncio
 @pytest.mark.parametrize("http_request", HTTP_REQUESTS)
 async def test_decompress_plain_no_header(http_request):
@@ -50,6 +52,7 @@
         assert decoded == "test"
 
 
+@pytest.mark.live_test_only
 @pytest.mark.asyncio
 @pytest.mark.parametrize("http_request", HTTP_REQUESTS)
 async def test_compress_plain_no_header(http_request):
@@ -70,6 +73,7 @@
         assert decoded == "test"
 
 
+@pytest.mark.live_test_only
 @pytest.mark.asyncio
 @pytest.mark.parametrize("http_request", HTTP_REQUESTS)
 async def test_decompress_compressed_no_header(http_request):
@@ -93,6 +97,7 @@
             pass
 
 
+@pytest.mark.live_test_only
 @pytest.mark.asyncio
 @pytest.mark.parametrize("http_request", HTTP_REQUESTS)
 async def test_compress_compressed_no_header(http_request):
@@ -121,7 +126,6 @@
 @pytest.mark.parametrize("http_request", HTTP_REQUESTS)
 async def test_decompress_plain_header(http_request):
     # expect error
-    import zlib
 
     account_name = "coretests"
     account_url = "https://{}.blob.core.windows.net".format(account_name)
@@ -141,6 +145,7 @@
             pass
 
 
+@pytest.mark.live_test_only
 @pytest.mark.asyncio
 @pytest.mark.parametrize("http_request", HTTP_REQUESTS)
 async def test_compress_plain_header(http_request):
@@ -161,6 +166,7 @@
         assert decoded == "test"
 
 
+@pytest.mark.live_test_only
 @pytest.mark.asyncio
 @pytest.mark.parametrize("http_request", HTTP_REQUESTS)
 async def test_decompress_compressed_header(http_request):
@@ -181,6 +187,20 @@
         assert decoded == "test"
 
 
+@pytest.mark.asyncio
+@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
+async def test_compress_compressed_no_header_offline(port, http_request):
+    # expect compressed text
+    client = AsyncPipelineClient("")
+    async with client:
+        request = http_request(method="GET", url="http://localhost:{}/streams/compressed_no_header".format(port))
+        pipeline_response = await client._pipeline.run(request, stream=True)
+        response = pipeline_response.http_response
+        data = response.stream_download(client._pipeline, decompress=False)
+        with pytest.raises(UnicodeDecodeError):
+            b"".join([d async for d in data]).decode("utf-8")
+
+
 @pytest.mark.live_test_only
 @pytest.mark.asyncio
 @pytest.mark.parametrize("http_request", HTTP_REQUESTS)
@@ -195,11 +215,103 @@
         pipeline_response = await client._pipeline.run(request, stream=True)
         response = pipeline_response.http_response
         data = response.stream_download(client._pipeline, decompress=False)
-        content = b""
-        async for d in data:
-            content += d
-        try:
-            decoded = content.decode("utf-8")
-            assert False
-        except UnicodeDecodeError:
-            pass
+        with pytest.raises(UnicodeDecodeError):
+            b"".join([d async for d in data]).decode("utf-8")
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
+async def test_decompress_plain_no_header_offline(port, http_request):
+    # expect plain text
+    client = AsyncPipelineClient("")
+    async with client:
+        request = http_request(method="GET", url="http://localhost:{}/streams/string".format(port))
+        pipeline_response = await client._pipeline.run(request, stream=True)
+        response = pipeline_response.http_response
+        data = response.stream_download(client._pipeline, decompress=True)
+        decoded = b"".join([d async for d in data]).decode("utf-8")
+        assert decoded == "test"
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
+async def test_compress_plain_header_offline(port, http_request):
+    # expect plain text
+    client = AsyncPipelineClient("")
+    async with client:
+        request = http_request(method="GET", url="http://localhost:{}/streams/plain_header".format(port))
+        pipeline_response = await client._pipeline.run(request, stream=True)
+        response = pipeline_response.http_response
+        data = response.stream_download(client._pipeline, decompress=False)
+        decoded = b"".join([d async for d in data]).decode("utf-8")
+        assert decoded == "test"
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
+async def test_decompress_compressed_no_header_offline(port, http_request):
+    # expect compressed text
+    client = AsyncPipelineClient("")
+    async with client:
+        request = http_request(method="GET", url="http://localhost:{}/streams/compressed_no_header".format(port))
+        pipeline_response = await client._pipeline.run(request, stream=True)
+        response = pipeline_response.http_response
+        data = response.stream_download(client._pipeline, decompress=True)
+
+        with pytest.raises(UnicodeDecodeError):
+            b"".join([d async for d in data]).decode("utf-8")
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
+async def test_compress_compressed_header_offline(port, http_request):
+    # expect compressed text
+    client = AsyncPipelineClient("")
+    async with client:
+        request = http_request(method="GET", url="http://localhost:{}/streams/compressed_header".format(port))
+        pipeline_response = await client._pipeline.run(request, stream=True)
+        response = pipeline_response.http_response
+        data = response.stream_download(client._pipeline, decompress=False)
+        with pytest.raises(UnicodeDecodeError):
+            b"".join([d async for d in data]).decode("utf-8")
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
+async def test_decompress_plain_header_offline(port, http_request):
+    # expect error
+    client = AsyncPipelineClient("")
+    async with client:
+        request = http_request(method="GET", url="http://localhost:{}/streams/compressed".format(port))
+        pipeline_response = await client._pipeline.run(request, stream=True)
+        response = pipeline_response.http_response
+        data = response.stream_download(client._pipeline, decompress=True)
+        with pytest.raises((zlib.error, DecodeError)):
+            b"".join([d async for d in data])
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
+async def test_compress_plain_no_header_offline(port, http_request):
+    client = AsyncPipelineClient("")
+    async with client:
+        request = http_request(method="GET", url="http://localhost:{}/streams/string".format(port))
+        pipeline_response = await client._pipeline.run(request, stream=True)
+        response = pipeline_response.http_response
+        data = response.stream_download(client._pipeline, decompress=False)
+        decoded = b"".join([d async for d in data]).decode("utf-8")
+        assert decoded == "test"
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
+async def test_decompress_compressed_header_offline(port, http_request):
+    # expect compressed text
+    client = AsyncPipelineClient("")
+    async with client:
+        request = http_request(method="GET", url="http://localhost:{}/streams/decompress_header".format(port))
+        pipeline_response = await client._pipeline.run(request, stream=True)
+        response = pipeline_response.http_response
+        data = response.stream_download(client._pipeline, decompress=True)
+        decoded = b"".join([d async for d in data]).decode("utf-8")
+        assert decoded == "test"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/perf_tests/_test_base.py new/azure-core-1.30.1/tests/perf_tests/_test_base.py
--- old/azure-core-1.30.0/tests/perf_tests/_test_base.py        1970-01-01 01:00:00.000000000 +0100
+++ new/azure-core-1.30.1/tests/perf_tests/_test_base.py        2024-02-29 20:44:39.000000000 +0100
@@ -0,0 +1,266 @@
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+
+import os
+import uuid
+import string
+import random
+
+from devtools_testutils.perfstress_tests import PerfStressTest
+
+from azure.core import PipelineClient, AsyncPipelineClient
+from azure.core.pipeline import Pipeline, AsyncPipeline
+from azure.core.pipeline.transport import (
+    RequestsTransport,
+    AioHttpTransport,
+    AsyncioRequestsTransport,
+)
+from azure.core.pipeline.policies import (
+    UserAgentPolicy,
+    HeadersPolicy,
+    ProxyPolicy,
+    NetworkTraceLoggingPolicy,
+    HttpLoggingPolicy,
+    RetryPolicy,
+    CustomHookPolicy,
+    RedirectPolicy,
+    AsyncRetryPolicy,
+    AsyncRedirectPolicy,
+    BearerTokenCredentialPolicy,
+    AsyncBearerTokenCredentialPolicy,
+)
+import azure.core.pipeline.policies as policies
+from azure.core.credentials import AzureNamedKeyCredential
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+)
+from azure.identity import ClientSecretCredential
+from azure.identity.aio import ClientSecretCredential as AsyncClientSecretCredential
+from azure.data.tables.aio import TableClient
+
+from azure.storage.blob._shared.authentication import SharedKeyCredentialPolicy as BlobSharedKeyCredentialPolicy
+from azure.data.tables._authentication import SharedKeyCredentialPolicy as TableSharedKeyCredentialPolicy
+
+_LETTERS = string.ascii_letters
+
+
+class _ServiceTest(PerfStressTest):
+    transport = None
+    async_transport = None
+
+    def __init__(self, arguments):
+        super().__init__(arguments)
+        self.account_name = self.get_from_env("AZURE_STORAGE_ACCOUNT_NAME")
+        self.account_key = self.get_from_env("AZURE_STORAGE_ACCOUNT_KEY")
+        async_transport_types = {"aiohttp": AioHttpTransport, "requests": AsyncioRequestsTransport}
+        sync_transport_types = {"requests": RequestsTransport}
+        self.tenant_id = os.environ["CORE_TENANT_ID"]
+        self.client_id = os.environ["CORE_CLIENT_ID"]
+        self.client_secret = os.environ["CORE_CLIENT_SECRET"]
+        self.storage_scope = "https://storage.azure.com/.default"
+
+        # defaults transports
+        self.sync_transport = RequestsTransport
+        self.async_transport = AioHttpTransport
+
+        # if transport is specified, use that
+        if self.args.transport:
+            # if sync, override sync default
+            if self.args.sync:
+                try:
+                    self.sync_transport = sync_transport_types[self.args.transport]
+                except KeyError:
+                    raise ValueError(f"Invalid sync transport:{self.args.transport}\n Valid options are:\n- requests\n")
+            # if async, override async default
+            else:
+                try:
+                    self.async_transport = async_transport_types[self.args.transport]
+                except KeyError:
+                    raise ValueError(
+                        f"Invalid async transport:{self.args.transport}\n Valid options are:\n- aiohttp\n- requests\n"
+                    )
+
+        self.error_map = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+
+    def _build_sync_pipeline_client(self, auth_policy):
+        default_policies = [
+            UserAgentPolicy,
+            HeadersPolicy,
+            ProxyPolicy,
+            NetworkTraceLoggingPolicy,
+            HttpLoggingPolicy,
+            RetryPolicy,
+            CustomHookPolicy,
+            RedirectPolicy,
+        ]
+
+        if self.args.policies is None:
+            # if None, only auth policy is passed in
+            sync_pipeline = Pipeline(transport=self.sync_transport(), policies=[auth_policy])
+        elif self.args.policies == "all":
+            # if all, autorest default policies + auth policy
+            sync_policies = [auth_policy]
+            sync_policies.extend([policy(sdk_moniker=self.sdk_moniker) for policy in default_policies])
+            sync_pipeline = Pipeline(transport=self.sync_transport(), policies=sync_policies)
+        else:
+            sync_policies = [auth_policy]
+            for p in self.args.policies.split(","):
+                try:
+                    policy = getattr(policies, p)
+                except AttributeError as exc:
+                    raise ValueError(
+                        f"Azure Core has no policy named {exc.name}. Please 
use policies from the following list: {policies.__all__}"
+                    ) from exc
+                sync_policies.append(policy(sdk_moniker=self.sdk_moniker))
+            sync_pipeline = Pipeline(transport=self.sync_transport(), 
policies=sync_policies)
+        return PipelineClient(self.account_endpoint, pipeline=sync_pipeline)
+
+    def _build_async_pipeline_client(self, auth_policy):
+        default_policies = [
+            UserAgentPolicy,
+            HeadersPolicy,
+            ProxyPolicy,
+            NetworkTraceLoggingPolicy,
+            HttpLoggingPolicy,
+            AsyncRetryPolicy,
+            CustomHookPolicy,
+            AsyncRedirectPolicy,
+        ]
+        if self.args.policies is None:
+            # if None, only auth policy is passed in
+            async_pipeline = AsyncPipeline(transport=self.async_transport(), policies=[auth_policy])
+        elif self.args.policies == "all":
+            # if all, autorest default policies + auth policy
+            async_policies = [auth_policy]
+            async_policies.extend([policy(sdk_moniker=self.sdk_moniker) for policy in default_policies])
+            async_pipeline = AsyncPipeline(transport=self.async_transport(), policies=async_policies)
+        else:
+            async_policies = [auth_policy]
+            # if custom list of policies, pass in custom list + auth policy
+            for p in self.args.policies.split(","):
+                try:
+                    policy = getattr(policies, p)
+                except AttributeError as exc:
+                    raise ValueError(
+                        f"Azure Core has no policy named {exc.name}. Please 
use policies from the following list: {policies.__all__}"
+                    ) from exc
+                async_policies.append(policy(sdk_moniker=self.sdk_moniker))
+            async_pipeline = AsyncPipeline(transport=self.async_transport(), 
policies=async_policies)
+        return AsyncPipelineClient(self.account_endpoint, 
pipeline=async_pipeline)
+
+    def _set_auth_policies(self):
+        if not self.args.aad:
+            # if tables, create table credential policy, else blob policy
+            if "tables" in self.sdk_moniker:
+                self.sync_auth_policy = TableSharedKeyCredentialPolicy(
+                    AzureNamedKeyCredential(self.account_name, self.account_key)
+                )
+                self.async_auth_policy = self.sync_auth_policy
+            else:
+                self.sync_auth_policy = BlobSharedKeyCredentialPolicy(self.account_name, self.account_key)
+                self.async_auth_policy = self.sync_auth_policy
+        else:
+            sync_credential = ClientSecretCredential(self.tenant_id, self.client_id, self.client_secret)
+            self.sync_auth_policy = BearerTokenCredentialPolicy(sync_credential, self.storage_scope)
+            async_credential = AsyncClientSecretCredential(self.tenant_id, self.client_id, self.client_secret)
+            self.async_auth_policy = AsyncBearerTokenCredentialPolicy(async_credential, self.storage_scope)
+
+    @staticmethod
+    def add_arguments(parser):
+        super(_ServiceTest, _ServiceTest).add_arguments(parser)
+        parser.add_argument(
+            "--transport",
+            nargs="?",
+            type=str,
+            help="""Underlying HttpTransport type. Defaults to `aiohttp` if 
async, `requests` if sync. Other possible values for async:\n"""
+            """ - `requests`\n""",
+            default=None,
+        )
+        parser.add_argument(
+            "-s", "--size", nargs="?", type=int, help="Size of data to 
transfer.  Default is 10240.", default=10240
+        )
+        parser.add_argument(
+            "--policies",
+            nargs="?",
+            type=str,
+            help="""List of policies to pass in to the pipeline. Options:"""
+            """\n- None: No extra policies passed in, except for 
authentication policy. This is the default."""
+            """\n- 'all': All policies added automatically by autorest."""
+            """\n- 'policy1,policy2': Comma-separated list of policies, such 
as 'RetryPolicy,HttpLoggingPolicy'""",
+            default=None,
+        )
+        parser.add_argument("--aad", action="store_true", help="Use AAD 
authentication instead of shared key.")
+
+
+class _BlobTest(_ServiceTest):
+    container_name = "perfstress-" + str(uuid.uuid4())
+
+    def __init__(self, arguments):
+        super().__init__(arguments)
+        self.account_endpoint = self.get_from_env("AZURE_STORAGE_BLOBS_ENDPOINT")
+        self.container_name = self.get_from_env("AZURE_STORAGE_CONTAINER_NAME")
+        self.api_version = "2021-12-02"
+        self.sdk_moniker = f"storage-blob/{self.api_version}"
+
+        self._set_auth_policies()
+        self.pipeline_client = self._build_sync_pipeline_client(self.sync_auth_policy)
+        self.async_pipeline_client = self._build_async_pipeline_client(self.async_auth_policy)
+
+    async def close(self):
+        self.pipeline_client.close()
+        await self.async_pipeline_client.close()
+        await super().close()
+
+
+class _TableTest(_ServiceTest):
+    table_name = "".join(random.choice(_LETTERS) for i in range(30))
+
+    def __init__(self, arguments):
+        super().__init__(arguments)
+        self.account_endpoint = self.get_from_env("AZURE_STORAGE_TABLES_ENDPOINT")
+        self.api_version = "2019-02-02"
+        self.data_service_version = "3.0"
+        self.sdk_moniker = f"tables/{self.api_version}"
+        self._set_auth_policies()
+
+        self.pipeline_client = self._build_sync_pipeline_client(self.sync_auth_policy)
+        self.async_pipeline_client = self._build_async_pipeline_client(self.async_auth_policy)
+
+        self.connection_string = self.get_from_env("AZURE_STORAGE_CONN_STR")
+        self.async_table_client = TableClient.from_connection_string(self.connection_string, self.table_name)
+
+    async def global_setup(self):
+        await super().global_setup()
+        await self.async_table_client.create_table()
+
+    async def global_cleanup(self):
+        await self.async_table_client.delete_table()
+
+    def get_base_entity(self, pk, rk, size):
+        # 227 is the length of the entity with Data of length 0
+        base_entity_length = 227
+        data_length = max(size - base_entity_length, 0)
+        # size = 227 + data_length
+        return {
+            "PartitionKey": pk,
+            "RowKey": rk,
+            "Data": "a" * data_length,
+        }
+
+    def get_entity(self, rk=0):
+        return {"PartitionKey": "pk", "RowKey": str(rk), "Property1": 
f"a{rk}", "Property2": f"b{rk}"}
+
+    async def close(self):
+        self.pipeline_client.close()
+        await self.async_pipeline_client.close()
+        await super().close()
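
For orientation, _build_sync_pipeline_client above boils down to assembling an azure-core Pipeline from a transport plus an ordered policy list and handing it to PipelineClient. A minimal standalone sketch (endpoint and policy choices here are illustrative only):

    from azure.core import PipelineClient
    from azure.core.pipeline import Pipeline
    from azure.core.pipeline.policies import HttpLoggingPolicy, RetryPolicy
    from azure.core.pipeline.transport import RequestsTransport
    from azure.core.rest import HttpRequest

    # Transport plus policies, applied in list order around each request.
    pipeline = Pipeline(transport=RequestsTransport(), policies=[RetryPolicy(), HttpLoggingPolicy()])
    client = PipelineClient("https://example.blob.core.windows.net/", pipeline=pipeline)

    # The perf tests drive the pipeline directly, just like the code above:
    request = HttpRequest("GET", "https://example.blob.core.windows.net/container/blob")
    response = client._pipeline.run(request).http_response
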
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/perf_tests/custom_iterator.py new/azure-core-1.30.1/tests/perf_tests/custom_iterator.py
--- old/azure-core-1.30.0/tests/perf_tests/custom_iterator.py   1970-01-01 01:00:00.000000000 +0100
+++ new/azure-core-1.30.1/tests/perf_tests/custom_iterator.py   2024-02-29 20:44:39.000000000 +0100
@@ -0,0 +1,81 @@
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+from typing import (
+    Any,
+    Callable,
+)
+from azure.core.paging import PageIterator
+from azure.core.async_paging import AsyncPageIterator
+
+NEXT_PARTITION_KEY = "x-ms-continuation-NextPartitionKey"
+NEXT_ROW_KEY = "x-ms-continuation-NextRowKey"
+
+
+class MockResponse:
+    pass
+
+
+class CustomIterator(PageIterator):
+    def __init__(self, command: Callable, **kwargs: Any) -> None:
+        super(CustomIterator, self).__init__(
+            self._get_next_cb,
+            self._extract_data_cb,
+            continuation_token=kwargs.get("continuation_token"),
+        )
+        self._command = command
+        self.page_size = kwargs.get("page_size")
+
+    def _get_next_cb(self, continuation_token, **kwargs):  # pylint: disable=inconsistent-return-statements
+        if not continuation_token:
+            next_partition_key = None
+            next_row_key = None
+        else:
+            next_partition_key = continuation_token.get("PartitionKey")
+            next_row_key = continuation_token.get("RowKey")
+
+        return self._command(top=self.page_size, next_partition_key=next_partition_key, next_row_key=next_row_key)
+
+    def _extract_data_cb(self, response):
+        next_entity = None
+        if response.headers and (NEXT_PARTITION_KEY in response.headers or NEXT_ROW_KEY in response.headers):
+            next_entity = {
+                "PartitionKey": response.headers[NEXT_PARTITION_KEY],
+                "RowKey": response.headers[NEXT_ROW_KEY],
+            }
+
+        return next_entity, response.json()["value"]
+
+
+class AsyncCustomIterator(AsyncPageIterator):
+    def __init__(
+        self,
+        command,
+        page_size=None,
+        continuation_token=None,
+    ):
+        super(AsyncCustomIterator, self).__init__(
+            get_next=self._get_next_cb, extract_data=self._extract_data_cb, continuation_token=continuation_token or ""
+        )
+        self._command = command
+        self.page_size = page_size
+
+    async def _get_next_cb(self, continuation_token, **kwargs):  # pylint: disable=inconsistent-return-statements
+        if not continuation_token:
+            next_partition_key = None
+            next_row_key = None
+        else:
+            next_partition_key = continuation_token.get("PartitionKey")
+            next_row_key = continuation_token.get("RowKey")
+
+        return await self._command(top=self.page_size, next_partition_key=next_partition_key, next_row_key=next_row_key)
+
+    async def _extract_data_cb(self, response):
+        next_entity = None
+        if response.headers and (NEXT_PARTITION_KEY in response.headers or NEXT_ROW_KEY in response.headers):
+            next_entity = {
+                "PartitionKey": response.headers[NEXT_PARTITION_KEY],
+                "RowKey": response.headers[NEXT_ROW_KEY],
+            }
+        return next_entity, response.json()["value"]
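
The iterators above plug into azure-core's paging protocol: ItemPaged calls _get_next_cb to fetch each page and _extract_data_cb to split it into a continuation token plus items. Hypothetical usage with a stubbed command (FakeResponse and fake_command are stand-ins for the real table query shown in list_entities_json.py below):

    from azure.core.paging import ItemPaged
    from custom_iterator import CustomIterator  # the class defined above

    class FakeResponse:
        headers = {}  # no continuation headers, so iteration ends after one page
        @staticmethod
        def json():
            return {"value": [{"PartitionKey": "pk", "RowKey": "0"}]}

    def fake_command(top=None, next_partition_key=None, next_row_key=None):
        return FakeResponse()

    for entity in ItemPaged(fake_command, page_iterator_class=CustomIterator, page_size=100):
        print(entity)
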
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/perf_tests/download_binary.py new/azure-core-1.30.1/tests/perf_tests/download_binary.py
--- old/azure-core-1.30.0/tests/perf_tests/download_binary.py   1970-01-01 01:00:00.000000000 +0100
+++ new/azure-core-1.30.1/tests/perf_tests/download_binary.py   2024-02-29 20:44:39.000000000 +0100
@@ -0,0 +1,86 @@
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+
+from time import time
+from wsgiref.handlers import format_date_time
+from devtools_testutils.perfstress_tests import get_random_bytes, WriteStream
+
+from azure.core.exceptions import (
+    HttpResponseError,
+    map_error,
+)
+from azure.core.rest import HttpRequest
+from azure.storage.blob._generated.operations._block_blob_operations import build_upload_request
+from ._test_base import _BlobTest
+
+
+class DownloadBinaryDataTest(_BlobTest):
+    def __init__(self, arguments):
+        super().__init__(arguments)
+        blob_name = "streamdownloadtest"
+        self.blob_endpoint = f"{self.account_endpoint}{self.container_name}/{blob_name}"
+
+    async def global_setup(self):
+        await super().global_setup()
+        data = get_random_bytes(self.args.size)
+        current_time = format_date_time(time())
+        request = build_upload_request(
+            url=self.blob_endpoint,
+            content=data,
+            content_length=self.args.size,
+            content_type="application/octet-stream",
+            headers={
+                "x-ms-version": self.api_version,
+                "x-ms-date": current_time,
+            },
+        )
+        response = (await self.async_pipeline_client._pipeline.run(request, stream=False)).http_response
+
+        if response.status_code not in [201]:
+            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
+            raise HttpResponseError(response=response)
+
+    def run_sync(self):
+        current_time = format_date_time(time())
+        response = self.pipeline_client._pipeline.run(
+            HttpRequest(
+                "GET",
+                self.blob_endpoint,
+                headers={
+                    "x-ms-version": self.api_version,
+                    "Accept": "application/octet-stream",
+                    "x-ms-date": current_time,
+                },
+            ),
+            stream=True,
+        ).http_response
+        response.read()
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
+            raise HttpResponseError(response=response)
+
+    async def run_async(self):
+        current_time = format_date_time(time())
+        response = (
+            await self.async_pipeline_client._pipeline.run(
+                HttpRequest(
+                    "GET",
+                    self.blob_endpoint,
+                    headers={
+                        "x-ms-version": self.api_version,
+                        "Accept": "application/octet-stream",
+                        "x-ms-date": current_time,
+                    },
+                ),
+                stream=True,
+            )
+        ).http_response
+        await response.read()
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
+            raise HttpResponseError(response=response)
+
+    async def close(self):
+        await super().close()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/perf_tests/list_entities_json.py new/azure-core-1.30.1/tests/perf_tests/list_entities_json.py
--- old/azure-core-1.30.0/tests/perf_tests/list_entities_json.py        1970-01-01 01:00:00.000000000 +0100
+++ new/azure-core-1.30.1/tests/perf_tests/list_entities_json.py        2024-02-29 20:44:39.000000000 +0100
@@ -0,0 +1,130 @@
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+from time import time
+from wsgiref.handlers import format_date_time
+from urllib.parse import quote
+
+from azure.core.rest import HttpRequest
+from azure.core.exceptions import (
+    HttpResponseError,
+    map_error,
+)
+from azure.core.paging import ItemPaged
+from azure.core.async_paging import AsyncItemPaged
+
+from .custom_iterator import CustomIterator, AsyncCustomIterator
+from ._test_base import _TableTest
+
+
+class ListEntitiesPageableTest(_TableTest):
+    def __init__(self, arguments):
+        super().__init__(arguments)
+        self.url = f"{self.account_endpoint}{self.table_name}()"
+
+    async def global_setup(self):
+        await super().global_setup()
+        batch_size = 0
+        batch = []
+        for row in range(self.args.count):
+            entity = self.get_entity(row)
+            batch.append(("upsert", entity))
+            batch_size += 1
+            if batch_size >= 100:
+                await self.async_table_client.submit_transaction(batch)
+                batch = []
+                batch_size = 0
+        if batch_size:
+            await self.async_table_client.submit_transaction(batch)
+
+    def _get_list_entities(self, *, top=None, next_partition_key=None, next_row_key=None, **kwargs):
+        current_time = format_date_time(time())
+        params = {}
+        if top:
+            params["$top"] = top
+        if next_partition_key:
+            params["NextPartitionKey"] = quote(next_partition_key)
+        if next_row_key:
+            params["NextRowKey"] = quote(next_row_key)
+
+        request = HttpRequest(
+            method="GET",
+            url=self.url,
+            params=params,
+            headers={
+                "x-ms-version": self.api_version,
+                "DataServiceVersion": self.data_service_version,
+                "Accept": "application/json;odata=minimalmetadata;",
+                "x-ms-date": current_time,
+            },
+        )
+        response = self.pipeline_client._pipeline.run(request).http_response
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
+            raise HttpResponseError(response=response)
+
+        return response
+
+    async def _get_list_entities_async(self, *, top=None, next_partition_key=None, next_row_key=None, **kwargs):
+        current_time = format_date_time(time())
+        params = {}
+        if top:
+            params["$top"] = top
+        if next_partition_key:
+            params["NextPartitionKey"] = quote(next_partition_key)
+        if next_row_key:
+            params["NextRowKey"] = quote(next_row_key)
+
+        request = HttpRequest(
+            method="GET",
+            url=self.url,
+            params=params,
+            headers={
+                "x-ms-version": self.api_version,
+                "DataServiceVersion": self.data_service_version,
+                "Accept": "application/json;odata=minimalmetadata;",
+                "x-ms-date": current_time,
+            },
+        )
+        response = (await self.async_pipeline_client._pipeline.run(request)).http_response
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
+            raise HttpResponseError(response=response)
+
+        return response
+
+    def run_sync(self):
+        for _ in ItemPaged(
+            self._get_list_entities,
+            page_iterator_class=CustomIterator,
+            page_size=self.args.page_size,
+        ):
+            pass
+
+    async def run_async(self):
+        async for _ in AsyncItemPaged(
+            self._get_list_entities_async,
+            page_iterator_class=AsyncCustomIterator,
+            page_size=self.args.page_size,
+        ):
+            pass
+
+    async def close(self):
+        await self.async_table_client.close()
+        await super().close()
+
+    @staticmethod
+    def add_arguments(parser):
+        super(ListEntitiesPageableTest, ListEntitiesPageableTest).add_arguments(parser)
+        parser.add_argument(
+            "--page-size",
+            nargs="?",
+            type=int,
+            help="""Max number of entities to list per page. """
+            """Default is None, which will return all possible results per 
page.""",
+            default=None,
+        )
+        parser.add_argument(
+            "-c", "--count", nargs="?", type=int, help="Number of table 
entities to list. Defaults to 100", default=100
+        )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/perf_tests/query_entities_json.py new/azure-core-1.30.1/tests/perf_tests/query_entities_json.py
--- old/azure-core-1.30.0/tests/perf_tests/query_entities_json.py       1970-01-01 01:00:00.000000000 +0100
+++ new/azure-core-1.30.1/tests/perf_tests/query_entities_json.py       2024-02-29 20:44:39.000000000 +0100
@@ -0,0 +1,79 @@
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+from time import time
+from wsgiref.handlers import format_date_time
+from urllib.parse import quote
+
+from azure.core.rest import HttpRequest
+from azure.core.exceptions import (
+    HttpResponseError,
+    map_error,
+)
+from ._test_base import _TableTest
+
+
+class QueryEntitiesJSONTest(_TableTest):
+    def __init__(self, arguments):
+        super().__init__(arguments)
+        # query params
+        self.select = quote("Property2")
+        self.filter = quote("Property1 eq 'a'")
+        self.url = f"{self.account_endpoint}{self.table_name}"
+
+    async def global_setup(self):
+        await super().global_setup()
+        # create entity to be queried
+        entity = self.get_entity()
+        await self.async_table_client.create_entity(entity)
+
+    def run_sync(self):
+        current_time = format_date_time(time())
+        request = HttpRequest(
+            method="GET",
+            url=self.url,
+            params={"$select": self.select, "$filter": self.filter},
+            headers={
+                "x-ms-version": self.api_version,
+                "DataServiceVersion": self.data_service_version,
+                "Content-Type": "application/json",
+                "Accept": "application/json",
+                "x-ms-date": current_time,
+            },
+        )
+        response = self.pipeline_client._pipeline.run(
+            request,
+        ).http_response
+        response.json()
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
+            raise HttpResponseError(response=response)
+
+    async def run_async(self):
+        current_time = format_date_time(time())
+        request = HttpRequest(
+            method="GET",
+            url=self.url,
+            params={"$select": self.select, "$filter": self.filter},
+            headers={
+                "x-ms-version": self.api_version,
+                "DataServiceVersion": self.data_service_version,
+                "Content-Type": "application/json",
+                "Accept": "application/json",
+                "x-ms-date": current_time,
+            },
+        )
+        response = (
+            await self.async_pipeline_client._pipeline.run(
+                request,
+            )
+        ).http_response
+        response.json()
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
+            raise HttpResponseError(response=response)
+
+    async def close(self):
+        await self.async_table_client.close()
+        await super().close()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/perf_tests/update_entity_json.py new/azure-core-1.30.1/tests/perf_tests/update_entity_json.py
--- old/azure-core-1.30.0/tests/perf_tests/update_entity_json.py        1970-01-01 01:00:00.000000000 +0100
+++ new/azure-core-1.30.1/tests/perf_tests/update_entity_json.py        2024-02-29 20:44:39.000000000 +0100
@@ -0,0 +1,86 @@
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+import uuid
+from time import time
+from wsgiref.handlers import format_date_time
+
+from azure.core.rest import HttpRequest
+from azure.core.exceptions import (
+    HttpResponseError,
+    map_error,
+)
+from ._test_base import _TableTest
+
+
+class UpdateEntityJSONTest(_TableTest):
+    partition_key = str(uuid.uuid4())
+    row_key = str(uuid.uuid4())
+
+    def __init__(self, arguments):
+        super().__init__(arguments)
+        # base entity
+        self.base_entity = self.get_base_entity(
+            UpdateEntityJSONTest.partition_key, UpdateEntityJSONTest.row_key, self.args.size
+        )
+        self.url = f"{self.account_endpoint}{self.table_name}(PartitionKey='{UpdateEntityJSONTest.partition_key}',RowKey='{UpdateEntityJSONTest.row_key}')"
+
+    async def global_setup(self):
+        await super().global_setup()
+        # create entity to be updated
+        await self.async_table_client.create_entity(self.base_entity)
+
+    def run_sync(self):
+        current_time = format_date_time(time())
+        request = HttpRequest(
+            method="PUT",
+            url=self.url,
+            params={},
+            headers={
+                "x-ms-version": self.api_version,
+                "DataServiceVersion": self.data_service_version,
+                "Content-Type": "application/json",
+                "Accept": "application/json",
+                "If-Match": "*",
+                "x-ms-date": current_time,
+            },
+            json=self.base_entity,
+            content=None,
+        )
+        response = self.pipeline_client._pipeline.run(
+            request,
+        ).http_response
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
+            raise HttpResponseError(response=response)
+
+    async def run_async(self):
+        current_time = format_date_time(time())
+        request = HttpRequest(
+            method="PUT",
+            url=self.url,
+            params={},
+            headers={
+                "x-ms-version": self.api_version,
+                "DataServiceVersion": self.data_service_version,
+                "Content-Type": "application/json",
+                "Accept": "application/json",
+                "If-Match": "*",
+                "x-ms-date": current_time,
+            },
+            json=self.base_entity,
+            content=None,
+        )
+        response = (
+            await self.async_pipeline_client._pipeline.run(
+                request,
+            )
+        ).http_response
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
+            raise HttpResponseError(response=response)
+
+    async def close(self):
+        await self.async_table_client.close()
+        await super().close()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/perf_tests/upload_binary.py new/azure-core-1.30.1/tests/perf_tests/upload_binary.py
--- old/azure-core-1.30.0/tests/perf_tests/upload_binary.py     1970-01-01 01:00:00.000000000 +0100
+++ new/azure-core-1.30.1/tests/perf_tests/upload_binary.py     2024-02-29 20:44:39.000000000 +0100
@@ -0,0 +1,70 @@
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+
+from time import time
+from wsgiref.handlers import format_date_time
+from devtools_testutils.perfstress_tests import RandomStream, AsyncRandomStream
+
+from azure.core.rest import HttpRequest
+from azure.core.exceptions import (
+    HttpResponseError,
+    map_error,
+)
+from ._test_base import _BlobTest
+
+
+class UploadBinaryDataTest(_BlobTest):
+    def __init__(self, arguments):
+        super().__init__(arguments)
+        blob_name = "uploadtest"
+        self.blob_endpoint = f"{self.account_endpoint}{self.container_name}/{blob_name}"
+        self.upload_stream = RandomStream(self.args.size)
+        self.upload_stream_async = AsyncRandomStream(self.args.size)
+
+    def run_sync(self):
+        self.upload_stream.reset()
+        current_time = format_date_time(time())
+        request = HttpRequest(
+            method="PUT",
+            url=self.blob_endpoint,
+            params={},
+            headers={
+                "x-ms-date": current_time,
+                "x-ms-blob-type": "BlockBlob",
+                "Content-Length": str(self.args.size),
+                "x-ms-version": self.api_version,
+                "Content-Type": "application/octet-stream",
+            },
+            content=self.upload_stream,
+        )
+        response = self.pipeline_client._pipeline.run(request).http_response
+        if response.status_code not in [201]:
+            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
+            raise HttpResponseError(response=response)
+
+    async def run_async(self):
+        self.upload_stream_async.reset()
+        current_time = format_date_time(time())
+        request = HttpRequest(
+            method="PUT",
+            url=self.blob_endpoint,
+            params={},
+            headers={
+                "x-ms-date": current_time,
+                "x-ms-blob-type": "BlockBlob",
+                "Content-Length": str(self.args.size),
+                "x-ms-version": self.api_version,
+                "Content-Type": "application/octet-stream",
+            },
+            content=self.upload_stream_async,
+        )
+        pipeline_response = await self.async_pipeline_client._pipeline.run(request)
+        response = pipeline_response.http_response
+        if response.status_code not in [201]:
+            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
+            raise HttpResponseError(response=response)
+
+    async def close(self):
+        await super().close()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/test_retry_policy.py new/azure-core-1.30.1/tests/test_retry_policy.py
--- old/azure-core-1.30.0/tests/test_retry_policy.py    2024-01-31 22:55:17.000000000 +0100
+++ new/azure-core-1.30.1/tests/test_retry_policy.py    2024-02-29 20:44:39.000000000 +0100
@@ -61,7 +61,8 @@
 
 
 @pytest.mark.parametrize(
-    "retry_after_input,http_request,http_response", product(["0", "800", 
"1000", "1200"], HTTP_REQUESTS, HTTP_RESPONSES)
+    "retry_after_input,http_request,http_response",
+    product(["0", "800", "1000", "1200", "0.9"], HTTP_REQUESTS, 
HTTP_RESPONSES),
 )
 def test_retry_after(retry_after_input, http_request, http_response):
     retry_policy = RetryPolicy()
@@ -82,7 +83,8 @@
 
 
 @pytest.mark.parametrize(
-    "retry_after_input,http_request,http_response", product(["0", "800", 
"1000", "1200"], HTTP_REQUESTS, HTTP_RESPONSES)
+    "retry_after_input,http_request,http_response",
+    product(["0", "800", "1000", "1200", "0.9"], HTTP_REQUESTS, 
HTTP_RESPONSES),
 )
 def test_x_ms_retry_after(retry_after_input, http_request, http_response):
     retry_policy = RetryPolicy()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/test_streaming.py new/azure-core-1.30.1/tests/test_streaming.py
--- old/azure-core-1.30.0/tests/test_streaming.py       2024-01-31 22:55:17.000000000 +0100
+++ new/azure-core-1.30.1/tests/test_streaming.py       2024-02-29 20:44:39.000000000 +0100
@@ -31,6 +31,7 @@
 from utils import HTTP_REQUESTS
 
 
+@pytest.mark.live_test_only
 @pytest.mark.parametrize("http_request", HTTP_REQUESTS)
 def test_decompress_plain_no_header(http_request):
     # expect plain text
@@ -79,6 +80,7 @@
     assert decoded == "test"
 
 
+@pytest.mark.live_test_only
 @pytest.mark.parametrize("http_request", HTTP_REQUESTS)
 def test_decompress_compressed_no_header(http_request):
     # expect compressed text
@@ -158,6 +160,7 @@
             list(data)
 
 
+@pytest.mark.live_test_only
 @pytest.mark.parametrize("http_request", HTTP_REQUESTS)
 def test_compress_plain_header(http_request):
     # expect plain text
@@ -204,6 +207,7 @@
         assert decoded == "test"
 
 
+@pytest.mark.live_test_only
 @pytest.mark.parametrize("http_request", HTTP_REQUESTS)
 def test_compress_compressed_header(http_request):
     # expect compressed text
@@ -221,3 +225,55 @@
         assert False
     except UnicodeDecodeError:
         pass
+
+
+@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
+def test_decompress_plain_no_header_offline(port, http_request):
+    # expect plain text
+    request = http_request(method="GET", 
url="http://localhost:{}/streams/string".format(port))
+    with RequestsTransport() as sender:
+        response = sender.send(request, stream=True)
+        response.raise_for_status()
+        data = response.stream_download(sender, decompress=True)
+        content = b"".join(list(data))
+        decoded = content.decode("utf-8")
+        assert decoded == "test"
+
+
+@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
+def test_compress_plain_header_offline(port, http_request):
+    # expect plain text
+    request = http_request(method="GET", 
url="http://localhost:{}/streams/plain_header".format(port))
+    with RequestsTransport() as sender:
+        response = sender.send(request, stream=True)
+        response.raise_for_status()
+        data = response.stream_download(sender, decompress=False)
+        content = b"".join(list(data))
+        decoded = content.decode("utf-8")
+        assert decoded == "test"
+
+
+@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
+def test_decompress_compressed_no_header_offline(port, http_request):
+    # expect compressed text
+    client = PipelineClient("")
+    request = http_request(method="GET", 
url="http://localhost:{}/streams/compressed_no_header".format(port))
+    response = client._pipeline.run(request, stream=True).http_response
+    response.raise_for_status()
+    data = response.stream_download(client._pipeline, decompress=True)
+    content = b"".join(list(data))
+    with pytest.raises(UnicodeDecodeError):
+        content.decode("utf-8")
+
+
+@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
+def test_compress_compressed_header_offline(port, http_request):
+    # expect compressed text
+    client = PipelineClient("")
+    request = http_request(method="GET", 
url="http://localhost:{}/streams/compressed_header".format(port))
+    response = client._pipeline.run(request, stream=True).http_response
+    response.raise_for_status()
+    data = response.stream_download(client._pipeline, decompress=False)
+    content = b"".join(list(data))
+    with pytest.raises(UnicodeDecodeError):
+        content.decode("utf-8")
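
The live_test_only marker used throughout these test files comes from the Azure SDK's shared test tooling (devtools_testutils); its wiring is not part of this diff. As a hypothetical sketch, such a marker is typically enforced by a conftest.py collection hook along these lines (not the actual devtools_testutils code):

    import os
    import pytest

    def pytest_collection_modifyitems(config, items):
        if os.environ.get("AZURE_TEST_RUN_LIVE", "").lower() == "true":
            return  # live run: keep everything
        skip_live = pytest.mark.skip(reason="live_test_only: needs live Azure resources")
        for item in items:
            if "live_test_only" in item.keywords:
                item.add_marker(skip_live)
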
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/test_utils.py new/azure-core-1.30.1/tests/test_utils.py
--- old/azure-core-1.30.0/tests/test_utils.py   2024-01-31 22:55:17.000000000 +0100
+++ new/azure-core-1.30.1/tests/test_utils.py   2024-02-29 20:44:39.000000000 +0100
@@ -8,6 +8,7 @@
 import pytest
 from azure.core.utils import case_insensitive_dict
 from azure.core.utils._utils import get_running_async_lock
+from azure.core.pipeline.policies._utils import parse_retry_after
 
 
 @pytest.fixture()
@@ -134,3 +135,14 @@
         sys.modules.pop("trio", None)
         with pytest.raises(RuntimeError):
             get_running_async_lock()
+
+
+def test_parse_retry_after():
+    ret = parse_retry_after("100")
+    assert ret == 100
+    ret = parse_retry_after("Fri, 1 Oct 2100 00:00:00 GMT")
+    assert ret > 0
+    ret = parse_retry_after("0")
+    assert ret == 0
+    ret = parse_retry_after("0.9")
+    assert ret == 0.9
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-core-1.30.0/tests/testserver_tests/coretestserver/coretestserver/test_routes/streams.py new/azure-core-1.30.1/tests/testserver_tests/coretestserver/coretestserver/test_routes/streams.py
--- old/azure-core-1.30.0/tests/testserver_tests/coretestserver/coretestserver/test_routes/streams.py   2024-01-31 22:55:17.000000000 +0100
+++ new/azure-core-1.30.1/tests/testserver_tests/coretestserver/coretestserver/test_routes/streams.py   2024-02-29 20:44:39.000000000 +0100
@@ -59,11 +59,21 @@
     return Response(streaming_test(), status=200, mimetype="text/plain")
 
 
+@streams_api.route("/plain_header", methods=["GET"])
+def plain_header():
+    return Response(streaming_test(), status=200, mimetype="text/plain", 
headers={"Content-Encoding": "gzip"})
+
+
 @streams_api.route("/compressed_no_header", methods=["GET"])
 def compressed_no_header():
     return Response(compressed_stream(), status=300)
 
 
+@streams_api.route("/compressed_header", methods=["GET"])
+def compressed_header():
+    return Response(compressed_stream(), status=200, headers={"Content-Encoding": "gzip"})
+
+
 @streams_api.route("/compressed", methods=["GET"])
 def compressed():
     return Response(stream_compressed_header_error(), status=300, headers={"Content-Encoding": "gzip"})