From f6b1c3544401e7c9a2dd875c752dc204d05bd387 Mon Sep 17 00:00:00 2001 From: Peter Wu Date: Sat, 9 May 2026 10:42:56 -0400 Subject: [PATCH 1/4] WIP --- .../samples/datalake_samples_query.py | 19 ++- .../samples/datalake_samples_service_async.py | 11 +- .../tests/perfstress_tests/_test_base.py | 30 ++++- .../tests/perfstress_tests/upload.py | 5 +- .../tests/settings/testcase.py | 26 ++-- .../tests/test_cpk.py | 7 +- .../tests/test_cpk_async.py | 7 +- .../tests/test_datalake_service_client.py | 14 ++- .../test_datalake_service_client_async.py | 17 +-- .../tests/test_directory.py | 116 ++++++++--------- .../tests/test_directory_async.py | 65 ++++++---- .../tests/test_file.py | 31 +++-- .../tests/test_file_async.py | 117 +++++++++++------- .../tests/test_file_system.py | 23 ++-- .../tests/test_file_system_async.py | 50 ++++---- .../tests/test_helpers.py | 7 +- .../tests/test_helpers_async.py | 7 +- .../tests/test_large_file.py | 37 +++--- .../tests/test_large_file_async.py | 38 +++--- 19 files changed, 347 insertions(+), 280 deletions(-) diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_query.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_query.py index 63862dfb894d..39397b6a6080 100644 --- a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_query.py +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_query.py @@ -16,6 +16,8 @@ """ import os import sys + +from azure.core.exceptions import HttpResponseError from azure.storage.filedatalake import DataLakeServiceClient, DelimitedJsonDialect, DelimitedTextDialect CSV_DATA = b'Service,Package,Version,RepoPath,MissingDocs\r\nApp Configuration,' \ @@ -64,7 +66,7 @@ def main(): filesystem_client = datalake_service_client.get_file_system_client(filesystem_name) try: filesystem_client.create_file_system() - except: + except HttpResponseError: pass # [START query] errors = [] @@ -77,9 +79,20 @@ def on_error(error): # select the second column of 
the csv file query_expression = "SELECT _2 from DataLakeStorage" - input_format = DelimitedTextDialect(delimiter=',', quotechar='"', lineterminator='\n', escapechar="", has_header=False) + input_format = DelimitedTextDialect( + delimiter=',', + quotechar='"', + lineterminator='\n', + escapechar="", + has_header=False + ) output_format = DelimitedJsonDialect(delimiter='\n') - reader = file_client.query_file(query_expression, on_error=on_error, file_format=input_format, output_format=output_format) + reader = file_client.query_file( + query_expression, + on_error=on_error, + file_format=input_format, + output_format=output_format + ) content = reader.readall() # [END query] print(content) diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_service_async.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_service_async.py index bdc9e35ea6ff..978a2ae08e5e 100644 --- a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_service_async.py +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_service_async.py @@ -25,6 +25,10 @@ import asyncio import os +from datetime import datetime, timedelta +from azure.storage.filedatalake.aio import DataLakeServiceClient + + connection_string = os.environ['DATALAKE_STORAGE_CONNECTION_STRING'] account_name = os.getenv('DATALAKE_STORAGE_ACCOUNT_NAME', "") @@ -34,8 +38,6 @@ async def main(): # Instantiate a DataLakeServiceClient using a connection string # [START create_datalake_service_client] - from azure.storage.filedatalake.aio import DataLakeServiceClient - from datetime import datetime, timedelta, timezone datalake_service_client = DataLakeServiceClient.from_connection_string(connection_string) # [END create_datalake_service_client] @@ -51,7 +53,6 @@ async def main(): async with datalake_service_client: # get user delegation key # [START get_user_delegation_key] - from datetime import datetime, timedelta user_delegation_key = await 
datalake_service_client.get_user_delegation_key(datetime.utcnow(), datetime.utcnow() + timedelta(hours=1)) # [END get_user_delegation_key] @@ -84,7 +85,8 @@ async def main(): from azure.storage.filedatalake import ContentSettings content_settings = ContentSettings( content_language='spanish', - content_disposition='inline') + content_disposition='inline' + ) await file_client.create_file(content_settings=content_settings) await file_client.set_metadata(metadata=metadata) file_props = await file_client.get_file_properties() @@ -103,5 +105,6 @@ async def main(): await token_credential.close() + if __name__ == '__main__': asyncio.run(main()) diff --git a/sdk/storage/azure-storage-file-datalake/tests/perfstress_tests/_test_base.py b/sdk/storage/azure-storage-file-datalake/tests/perfstress_tests/_test_base.py index a4ebe8102bf7..80ef0c5b33a8 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/perfstress_tests/_test_base.py +++ b/sdk/storage/azure-storage-file-datalake/tests/perfstress_tests/_test_base.py @@ -3,12 +3,10 @@ # Licensed under the MIT License. See License.txt in the project root for license information. 
# -------------------------------------------------------------------------------------------- -import os import uuid from devtools_testutils.perfstress_tests import PerfStressTest -from azure.core.exceptions import ResourceNotFoundError from azure.storage.filedatalake import DataLakeServiceClient as SyncDataLakeServiceClient from azure.storage.filedatalake.aio import DataLakeServiceClient as AsyncDataLakeServiceClient @@ -22,7 +20,8 @@ def __init__(self, arguments): connection_string = self.get_from_env("AZURE_STORAGE_CONNECTION_STRING") if not _ServiceTest.service_client or self.args.no_client_share: _ServiceTest.service_client = SyncDataLakeServiceClient.from_connection_string(conn_str=connection_string) - _ServiceTest.async_service_client = AsyncDataLakeServiceClient.from_connection_string(conn_str=connection_string) + _ServiceTest.async_service_client = AsyncDataLakeServiceClient.from_connection_string( + conn_str=connection_string) self.service_client = _ServiceTest.service_client self.async_service_client =_ServiceTest.async_service_client @@ -33,9 +32,28 @@ async def close(self): @staticmethod def add_arguments(parser): super(_ServiceTest, _ServiceTest).add_arguments(parser) - parser.add_argument('-c', '--max-concurrency', nargs='?', type=int, help='Maximum number of concurrent threads used for data transfer. Defaults to 1', default=1) - parser.add_argument('-s', '--size', nargs='?', type=int, help='Size of data to transfer. Default is 10240.', default=10240) - parser.add_argument('--no-client-share', action='store_true', help='Create one ServiceClient per test instance. Default is to share a single ServiceClient.', default=False) + parser.add_argument( + '-c', + '--max-concurrency', + nargs='?', + type=int, + help='Maximum number of concurrent threads used for data transfer. Defaults to 1', + default=1 + ) + parser.add_argument( + '-s', + '--size', + nargs='?', + type=int, + help='Size of data to transfer. 
Default is 10240.', + default=10240 + ) + parser.add_argument( + '--no-client-share', + action='store_true', + help='Create one ServiceClient per test instance. Default is to share a single ServiceClient.', + default=False + ) class _FileSystemTest(_ServiceTest): diff --git a/sdk/storage/azure-storage-file-datalake/tests/perfstress_tests/upload.py b/sdk/storage/azure-storage-file-datalake/tests/perfstress_tests/upload.py index 2e5dc69519fc..f2383deaa7b0 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/perfstress_tests/upload.py +++ b/sdk/storage/azure-storage-file-datalake/tests/perfstress_tests/upload.py @@ -3,10 +3,9 @@ # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- -from ._test_base import _FileTest +from devtools_testutils.perfstress_tests import AsyncRandomStream, RandomStream -from devtools_testutils.perfstress_tests import RandomStream -from devtools_testutils.perfstress_tests import AsyncRandomStream +from ._test_base import _FileTest class UploadTest(_FileTest): diff --git a/sdk/storage/azure-storage-file-datalake/tests/settings/testcase.py b/sdk/storage/azure-storage-file-datalake/tests/settings/testcase.py index a4d20ec40145..b8cbb7efde74 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/settings/testcase.py +++ b/sdk/storage/azure-storage-file-datalake/tests/settings/testcase.py @@ -1,19 +1,16 @@ -# coding: utf-8 # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- +# pylint: disable=unused-wildcard-import, wildcard-import + import functools import os.path from devtools_testutils import EnvironmentVariableLoader, EnvironmentVariableOptions from devtools_testutils.fake_credentials import STORAGE_ACCOUNT_FAKE_KEY -try: - from cStringIO import StringIO # Python 2 -except ImportError: - from io import StringIO try: # Running locally - use configuration in settings_real.py @@ -25,14 +22,19 @@ LOGGING_FORMAT = '%(asctime)s %(name)-20s %(levelname)-5s %(message)s' LOGGING_FORMAT = '%(asctime)s %(name)-20s %(levelname)-5s %(message)s' -os.environ['DATALAKE_STORAGE_ACCOUNT_NAME'] = os.environ.get('DATALAKE_STORAGE_ACCOUNT_NAME', None) or DATALAKE_STORAGE_ACCOUNT_NAME -os.environ['DATALAKE_STORAGE_ACCOUNT_KEY'] = os.environ.get('DATALAKE_STORAGE_ACCOUNT_KEY', None) or DATALAKE_STORAGE_ACCOUNT_KEY +os.environ['DATALAKE_STORAGE_ACCOUNT_NAME'] = (os.environ.get('DATALAKE_STORAGE_ACCOUNT_NAME', None) or + DATALAKE_STORAGE_ACCOUNT_NAME) +os.environ['DATALAKE_STORAGE_ACCOUNT_KEY'] = (os.environ.get('DATALAKE_STORAGE_ACCOUNT_KEY', None) or + DATALAKE_STORAGE_ACCOUNT_KEY) -os.environ['STORAGE_DATA_LAKE_SOFT_DELETE_ACCOUNT_NAME'] = os.environ.get('STORAGE_DATA_LAKE_SOFT_DELETE_ACCOUNT_NAME', None) or STORAGE_DATA_LAKE_SOFT_DELETE_ACCOUNT_NAME -os.environ['STORAGE_DATA_LAKE_SOFT_DELETE_ACCOUNT_KEY'] = os.environ.get('STORAGE_DATA_LAKE_SOFT_DELETE_ACCOUNT_KEY', None) or STORAGE_DATA_LAKE_SOFT_DELETE_ACCOUNT_KEY +os.environ['STORAGE_DATA_LAKE_SOFT_DELETE_ACCOUNT_NAME'] = os.environ.get( + 'STORAGE_DATA_LAKE_SOFT_DELETE_ACCOUNT_NAME', None) or STORAGE_DATA_LAKE_SOFT_DELETE_ACCOUNT_NAME +os.environ['STORAGE_DATA_LAKE_SOFT_DELETE_ACCOUNT_KEY'] = os.environ.get( + 'STORAGE_DATA_LAKE_SOFT_DELETE_ACCOUNT_KEY', None) or STORAGE_DATA_LAKE_SOFT_DELETE_ACCOUNT_KEY os.environ['AZURE_TEST_RUN_LIVE'] = os.environ.get('AZURE_TEST_RUN_LIVE', None) or RUN_IN_LIVE 
-os.environ['AZURE_SKIP_LIVE_RECORDING'] = os.environ.get('AZURE_SKIP_LIVE_RECORDING', None) or SKIP_LIVE_RECORDING +os.environ['AZURE_SKIP_LIVE_RECORDING'] = os.environ.get( + 'AZURE_SKIP_LIVE_RECORDING', None) or SKIP_LIVE_RECORDING os.environ['PROTOCOL'] = PROTOCOL os.environ['ACCOUNT_URL_SUFFIX'] = ACCOUNT_URL_SUFFIX @@ -42,5 +44,7 @@ datalake_storage_account_key=STORAGE_ACCOUNT_FAKE_KEY, storage_data_lake_soft_delete_account_name="storagesoftdelname", storage_data_lake_soft_delete_account_key=STORAGE_ACCOUNT_FAKE_KEY, - options=EnvironmentVariableOptions(hide_secrets=["datalake_storage_account_key", "storage_data_lake_soft_delete_account_key"]), + options=EnvironmentVariableOptions( + hide_secrets=["datalake_storage_account_key", "storage_data_lake_soft_delete_account_key"] + ), ) diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_cpk.py b/sdk/storage/azure-storage-file-datalake/tests/test_cpk.py index 87cd8651945f..045c4e7ebe23 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_cpk.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_cpk.py @@ -4,14 +4,15 @@ # license information. 
# -------------------------------------------------------------------------- -from azure.core.exceptions import ResourceExistsError, ResourceNotFoundError -from azure.storage.filedatalake import CustomerProvidedEncryptionKey, DataLakeServiceClient - from devtools_testutils import recorded_by_proxy from devtools_testutils.storage import StorageRecordedTestCase from settings.testcase import DataLakePreparer from test_quick_query import DATALAKE_CSV_DATA +from azure.core.exceptions import ResourceExistsError, ResourceNotFoundError +from azure.storage.filedatalake import CustomerProvidedEncryptionKey, DataLakeServiceClient + + # ------------------------------------------------------------------------------ TEST_DIRECTORY_PREFIX = 'directory' TEST_FILE_PREFIX = 'file' diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_cpk_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_cpk_async.py index a262054c17e7..ebabf6451105 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_cpk_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_cpk_async.py @@ -4,15 +4,14 @@ # license information. 
# -------------------------------------------------------------------------- -import asyncio +from devtools_testutils.aio import recorded_by_proxy_async +from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase +from settings.testcase import DataLakePreparer from azure.core.exceptions import ResourceExistsError, ResourceNotFoundError from azure.storage.filedatalake import CustomerProvidedEncryptionKey from azure.storage.filedatalake.aio import DataLakeServiceClient -from devtools_testutils.aio import recorded_by_proxy_async -from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase -from settings.testcase import DataLakePreparer # ------------------------------------------------------------------------------ TEST_DIRECTORY_PREFIX = 'directory' diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client.py b/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client.py index 396d13749399..f21f57090b07 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client.py @@ -4,12 +4,17 @@ # license information. 
# -------------------------------------------------------------------------- -import pytest from typing import NamedTuple from unittest.mock import MagicMock +import pytest + +from devtools_testutils import recorded_by_proxy +from devtools_testutils.storage import StorageRecordedTestCase +from settings.testcase import DataLakePreparer + from azure.core.credentials import AzureNamedKeyCredential -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError +from azure.core.exceptions import HttpResponseError from azure.storage.filedatalake import ( AnalyticsLogging, CorsRule, @@ -19,13 +24,10 @@ FileSystemClient, Metrics, RetentionPolicy, - StaticWebsite + StaticWebsite, ) from azure.storage.filedatalake._shared.parser import DEVSTORE_ACCOUNT_KEY, DEVSTORE_ACCOUNT_NAME -from devtools_testutils import recorded_by_proxy -from devtools_testutils.storage import StorageRecordedTestCase -from settings.testcase import DataLakePreparer # ------------------------------------------------------------------------------ TEST_FILE_SYSTEM_PREFIX = 'filesystem' diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client_async.py index 606ac56f6b4d..3abc85b49caa 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client_async.py @@ -4,31 +4,32 @@ # license information. 
# -------------------------------------------------------------------------- -import pytest import sys from typing import NamedTuple -from azure.core.credentials import AzureNamedKeyCredential -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase +from settings.testcase import DataLakePreparer + +from azure.core.credentials import AzureNamedKeyCredential +from azure.core.exceptions import HttpResponseError from azure.storage.filedatalake import ( AnalyticsLogging, CorsRule, Metrics, RetentionPolicy, - StaticWebsite + StaticWebsite, ) from azure.storage.filedatalake._shared.parser import DEVSTORE_ACCOUNT_KEY, DEVSTORE_ACCOUNT_NAME from azure.storage.filedatalake.aio import ( DataLakeDirectoryClient, DataLakeFileClient, DataLakeServiceClient, - FileSystemClient + FileSystemClient, ) -from devtools_testutils.aio import recorded_by_proxy_async -from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase -from settings.testcase import DataLakePreparer if sys.version_info >= (3, 8): from unittest.mock import AsyncMock diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_directory.py b/sdk/storage/azure-storage-file-datalake/tests/test_directory.py index 1d032e15b8b4..90bc6bc63380 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_directory.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_directory.py @@ -8,15 +8,19 @@ from time import sleep import pytest + +from devtools_testutils import recorded_by_proxy +from devtools_testutils.storage import StorageRecordedTestCase +from settings.testcase import DataLakePreparer + from azure.core import MatchConditions from azure.core.exceptions import ( AzureError, - ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceModifiedError, ResourceNotFoundError, - ServiceRequestError + 
ServiceRequestError, ) from azure.storage.filedatalake import ( ContentSettings, @@ -26,14 +30,10 @@ EncryptionScopeOptions, FileSystemSasPermissions, generate_directory_sas, - generate_file_system_sas + generate_file_system_sas, ) from azure.storage.filedatalake._models import AccessControlChangeCounters, AccessControlChangeResult -from azure.storage.filedatalake._serialize import _SUPPORTED_API_VERSIONS -from devtools_testutils import recorded_by_proxy -from devtools_testutils.storage import StorageRecordedTestCase -from settings.testcase import DataLakePreparer # ------------------------------------------------------------------------------ TEST_DIRECTORY_PREFIX = 'directory' @@ -87,9 +87,6 @@ def _create_sub_directory_and_files(self, directory_client, num_of_dirs, num_of_ for j in range(0, num_of_files_per_dir): sub_dir.create_file(self.get_resource_name('subfile' + str(j))) - def _create_file_system(self): - return self.dsc.create_file_system(self._get_file_system_reference()) - # --Helpers----------------------------------------------------------------- @DataLakePreparer() @@ -624,10 +621,14 @@ def test_set_access_control_recursive_with_failures(self, **kwargs): directory_name = self._get_directory_reference() directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name) directory_client.create_directory(owner=test_guid) - self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory(owner=test_guid, permissions='0777') - self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory(owner=test_guid, permissions='0777') - self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file(owner=test_guid, permissions='0777') - self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file(owner=test_guid, permissions='0777') + self.dsc.get_directory_client(self.file_system_name, directory_name + 
'/subdir1').create_directory( + owner=test_guid, permissions='0777') + self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory( + owner=test_guid, permissions='0777') + self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file( + owner=test_guid, permissions='0777') + self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file( + owner=test_guid, permissions='0777') directory_client.get_file_client('file3').create_file() # User delegation SAS with provided owner permissions @@ -692,10 +693,14 @@ def test_set_access_control_recursive_stop_on_failures(self, **kwargs): directory_name = self._get_directory_reference() directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name) directory_client.create_directory(owner=test_guid) - self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory(owner=test_guid, permissions='0777') - self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory(owner=test_guid, permissions='0777') - self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file(owner=test_guid,permissions='0777') - self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file(owner=test_guid,permissions='0777') + self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory( + owner=test_guid, permissions='0777') + self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory( + owner=test_guid, permissions='0777') + self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file( + owner=test_guid,permissions='0777') + self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file( + owner=test_guid,permissions='0777') 
directory_client.get_file_client('file3').create_file() # User delegation SAS with provided owner permissions @@ -761,10 +766,14 @@ def test_set_access_control_recursive_continue_on_failures(self, **kwargs): directory_name = self._get_directory_reference() directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name) directory_client.create_directory(owner=test_guid) - self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory(owner=test_guid, permissions='0777') - self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory(owner=test_guid, permissions='0777') - self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file(owner=test_guid, permissions='0777') - self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file(owner=test_guid, permissions='0777') + self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory( + owner=test_guid, permissions='0777') + self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory( + owner=test_guid, permissions='0777') + self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file( + owner=test_guid, permissions='0777') + self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file( + owner=test_guid, permissions='0777') directory_client.get_file_client('file3').create_file() self.dsc.get_directory_client(self.file_system_name, directory_name + '/dir3').create_directory() @@ -836,8 +845,8 @@ def test_set_access_control_recursive_in_batches_with_explicit_iteration(self, * batch_size = 2 while result.continuation is not None: - result = directory_client.set_access_control_recursive(acl=acl, batch_size=batch_size, max_batches=max_batches, - continuation=result.continuation) + result = 
directory_client.set_access_control_recursive( + acl=acl, batch_size=batch_size, max_batches=max_batches, continuation=result.continuation) running_tally.directories_successful += result.counters.directories_successful running_tally.files_successful += result.counters.files_successful @@ -963,10 +972,14 @@ def test_update_access_control_recursive_with_failures(self, **kwargs): directory_name = self._get_directory_reference() directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name) directory_client.create_directory(owner=test_guid) - self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory(owner=test_guid, permissions='0777') - self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory(owner=test_guid, permissions='0777') - self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file(owner=test_guid, permissions='0777') - self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file(owner=test_guid, permissions='0777') + self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory( + owner=test_guid, permissions='0777') + self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory( + owner=test_guid, permissions='0777') + self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file( + owner=test_guid, permissions='0777') + self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file( + owner=test_guid, permissions='0777') directory_client.get_file_client('file3').create_file() # User delegation SAS with provided owner permissions @@ -1052,7 +1065,7 @@ def test_remove_access_control_recursive_in_batches(self, **kwargs): summary = directory_client.remove_access_control_recursive(acl=REMOVE_ACL, batch_size=2) # Assert - 
summary.counters.directories_successful == num_sub_dirs + 1 # +1 as the dir itself was also included + assert summary.counters.directories_successful == num_sub_dirs + 1 # +1 as the dir itself was also included assert summary.counters.files_successful == num_sub_dirs * num_file_per_sub_dir assert summary.counters.failure_count == 0 @@ -1084,7 +1097,7 @@ def progress_callback(resp): batch_size=2) # Assert - summary.counters.directories_successful == num_sub_dirs + 1 # +1 as the dir itself was also included + assert summary.counters.directories_successful == num_sub_dirs + 1 # +1 as the dir itself was also included assert summary.counters.files_successful == num_sub_dirs * num_file_per_sub_dir assert summary.counters.failure_count == 0 assert summary.counters.directories_successful == running_tally.directories_successful @@ -1112,10 +1125,14 @@ def test_remove_access_control_recursive_with_failures(self, **kwargs): directory_name = self._get_directory_reference() directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name) directory_client.create_directory(owner=test_guid) - self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory(owner=test_guid, permissions='0777') - self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory(owner=test_guid, permissions='0777') - self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file(owner=test_guid, permissions='0777') - self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file(owner=test_guid, permissions='0777') + self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory( + owner=test_guid, permissions='0777') + self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory( + owner=test_guid, permissions='0777') + self.dsc.get_file_client(self.file_system_name, 
directory_name + '/subdir1/file1').create_file( + owner=test_guid, permissions='0777') + self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file( + owner=test_guid, permissions='0777') directory_client.get_file_client('file3').create_file() # User delegation SAS with provided owner permissions @@ -1150,7 +1167,8 @@ def progress_callback(resp): if resp.batch_failures: failed_entries.append(resp.batch_failures) - summary = owner_dir_client.remove_access_control_recursive(acl=REMOVE_ACL, progress_hook=progress_callback, batch_size=2) + summary = owner_dir_client.remove_access_control_recursive( + acl=REMOVE_ACL, progress_hook=progress_callback, batch_size=2) # Assert assert summary.counters.failure_count == 1 @@ -1532,32 +1550,6 @@ def test_using_directory_sas_to_create_file(self, **kwargs): with pytest.raises(HttpResponseError): directory_client.delete_directory() - @DataLakePreparer() - @recorded_by_proxy - def test_using_directory_sas_to_create_file(self, **kwargs): - datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") - datalake_storage_account_key = kwargs.pop("datalake_storage_account_key") - - newest_api_version = _SUPPORTED_API_VERSIONS[-1] - - service_client = DataLakeServiceClient("https://abc.dfs.core.windows.net", credential='fake') - filesys_client = service_client.get_file_system_client("filesys") - dir_client = DataLakeDirectoryClient("https://abc.dfs.core.windows.net", "filesys", "dir", credential='fake') - file_client = dir_client.get_file_client("file") - assert service_client.api_version == newest_api_version - assert filesys_client.api_version == newest_api_version - assert dir_client.api_version == newest_api_version - assert file_client.api_version == newest_api_version - - service_client2 = DataLakeServiceClient("https://abc.dfs.core.windows.net", credential='fake', api_version="2019-02-02") - filesys_client2 = service_client2.get_file_system_client("filesys") - dir_client2 = 
DataLakeDirectoryClient("https://abc.dfs.core.windows.net", "filesys", "dir", credential='fake', api_version="2019-02-02") - file_client2 = dir_client2.get_file_client("file") - assert service_client2.api_version == "2019-02-02" - assert filesys_client2.api_version == "2019-02-02" - assert dir_client2.api_version == "2019-02-02" - assert file_client2.api_version == "2019-02-02" - @DataLakePreparer() @recorded_by_proxy def test_storage_account_audience_dir_client(self, **kwargs): @@ -1600,7 +1592,7 @@ def test_bad_audience_dir_client(self, **kwargs): token_credential = self.get_credential(DataLakeServiceClient) directory_client = DataLakeDirectoryClient( self.dsc.url, self.file_system_name, directory_name, - credential=token_credential, audience=f'https://badaudience.blob.core.windows.net/' + credential=token_credential, audience='https://badaudience.blob.core.windows.net/' ) # Will not raise ClientAuthenticationError despite bad audience due to Bearer Challenge diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_directory_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_directory_async.py index 002e453c0e6e..a6bc9d696201 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_directory_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_directory_async.py @@ -3,21 +3,26 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- + import asyncio import time import unittest from datetime import datetime, timedelta import pytest + +from devtools_testutils.aio import recorded_by_proxy_async +from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase +from settings.testcase import DataLakePreparer + from azure.core import MatchConditions from azure.core.exceptions import ( AzureError, - ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceModifiedError, ResourceNotFoundError, - ServiceRequestError + ServiceRequestError, ) from azure.storage.filedatalake import ( AccessControlChangeCounters, @@ -27,14 +32,12 @@ EncryptionScopeOptions, FileSystemSasPermissions, generate_directory_sas, - generate_file_system_sas + generate_file_system_sas, ) -from azure.storage.filedatalake.aio import DataLakeDirectoryClient, DataLakeServiceClient from azure.storage.filedatalake._serialize import _SUPPORTED_API_VERSIONS +from azure.storage.filedatalake.aio import DataLakeDirectoryClient, DataLakeServiceClient + -from devtools_testutils.aio import recorded_by_proxy_async -from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase -from settings.testcase import DataLakePreparer # ------------------------------------------------------------------------------ TEST_DIRECTORY_PREFIX = 'directory' REMOVE_ACL = "mask," + "default:user,default:group," + \ @@ -87,9 +90,6 @@ async def _create_sub_directory_and_files(self, directory_client, num_of_dirs, n for j in range(0, num_of_files_per_dir): await sub_dir.create_file(self.get_resource_name('subfile' + str(j))) - async def _create_file_system(self): - return await self.dsc.create_file_system(self._get_file_system_reference()) - # --Helpers----------------------------------------------------------------- @DataLakePreparer() @@ -624,10 +624,14 @@ async def test_set_access_control_recursive_with_failures(self, **kwargs): directory_name = 
self._get_directory_reference() directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name) await directory_client.create_directory(owner=test_guid) - await self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory(owner=test_guid, permissions='0777') - await self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory(owner=test_guid, permissions='0777') - await self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file(owner=test_guid, permissions='0777') - await self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file(owner=test_guid, permissions='0777') + await self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory( + owner=test_guid, permissions='0777') + await self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory( + owner=test_guid, permissions='0777') + await self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file( + owner=test_guid, permissions='0777') + await self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file( + owner=test_guid, permissions='0777') await directory_client.get_file_client('file3').create_file() # User delegation SAS with provided owner permissions @@ -820,10 +824,14 @@ async def test_update_access_control_recursive_with_failures(self, **kwargs): directory_name = self._get_directory_reference() directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name) await directory_client.create_directory(owner=test_guid) - await self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory(owner=test_guid, permissions='0777') - await self.dsc.get_directory_client(self.file_system_name, directory_name + 
'/subdir2').create_directory(owner=test_guid, permissions='0777') - await self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file(owner=test_guid, permissions='0777') - await self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file(owner=test_guid, permissions='0777') + await self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory( + owner=test_guid, permissions='0777') + await self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory( + owner=test_guid, permissions='0777') + await self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file( + owner=test_guid, permissions='0777') + await self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file( + owner=test_guid, permissions='0777') await directory_client.get_file_client('file3').create_file() # User delegation SAS with provided owner permissions @@ -1042,10 +1050,14 @@ async def test_remove_access_control_recursive_with_failures(self, **kwargs): directory_name = self._get_directory_reference() directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name) await directory_client.create_directory(owner=test_guid) - await self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir1').create_directory(owner=test_guid, permissions='0777') - await self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory(owner=test_guid, permissions='0777') - await self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file(owner=test_guid, permissions='0777') - await self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file(owner=test_guid, permissions='0777') + await self.dsc.get_directory_client(self.file_system_name, directory_name + 
'/subdir1').create_directory( + owner=test_guid, permissions='0777') + await self.dsc.get_directory_client(self.file_system_name, directory_name + '/subdir2').create_directory( + owner=test_guid, permissions='0777') + await self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir1/file1').create_file( + owner=test_guid, permissions='0777') + await self.dsc.get_file_client(self.file_system_name, directory_name + '/subdir2/file2').create_file( + owner=test_guid, permissions='0777') await directory_client.get_file_client('file3').create_file() # User delegation SAS with provided owner permissions @@ -1080,8 +1092,11 @@ async def progress_callback(resp): if resp.batch_failures: failed_entries.append(resp.batch_failures) - summary = await owner_dir_client.remove_access_control_recursive(acl=REMOVE_ACL, progress_hook=progress_callback, - batch_size=2) + summary = await owner_dir_client.remove_access_control_recursive( + acl=REMOVE_ACL, + progress_hook=progress_callback, + batch_size=2 + ) # Assert assert summary.counters.failure_count == 1 @@ -1550,7 +1565,7 @@ async def test_bad_audience_dir_client(self, **kwargs): token_credential = self.get_credential(DataLakeServiceClient, is_async=True) directory_client = DataLakeDirectoryClient( self.dsc.url, self.file_system_name, directory_name, - credential=token_credential, audience=f'https://badaudience.blob.core.windows.net/' + credential=token_credential, audience='https://badaudience.blob.core.windows.net/' ) # Will not raise ClientAuthenticationError despite bad audience due to Bearer Challenge diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file.py b/sdk/storage/azure-storage-file-datalake/tests/test_file.py index 54b2533ee935..cc87bc10a1fb 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file.py @@ -11,6 +11,12 @@ from urllib.parse import quote, urlencode import pytest + +from devtools_testutils import 
recorded_by_proxy +from devtools_testutils.storage import StorageRecordedTestCase +from settings.testcase import DataLakePreparer +from test_helpers import MockStorageTransport, ProgressTracker + from azure.core import MatchConditions from azure.core.credentials import AzureSasCredential from azure.core.exceptions import ( @@ -18,7 +24,7 @@ HttpResponseError, ResourceExistsError, ResourceModifiedError, - ResourceNotFoundError + ResourceNotFoundError, ) from azure.storage.filedatalake import ( AccountSasPermissions, @@ -33,19 +39,14 @@ generate_account_sas, generate_file_sas, generate_file_system_sas, - ResourceTypes + ResourceTypes, ) -from devtools_testutils import recorded_by_proxy -from devtools_testutils.storage import StorageRecordedTestCase -from settings.testcase import DataLakePreparer -from test_helpers import MockStorageTransport, ProgressTracker # ------------------------------------------------------------------------------ - TEST_DIRECTORY_PREFIX = 'directory' TEST_FILE_PREFIX = 'file' - +TEST_FILE_SYSTEM_PREFIX = 'filesystem' # ------------------------------------------------------------------------------ @@ -54,7 +55,6 @@ def _setUp(self, account_name, account_key): url = self.account_url(account_name, 'dfs') self.dsc = DataLakeServiceClient(url, credential=account_key.secret, logging_enable=True) self.config = self.dsc._config - self.file_system_name = self.get_resource_name('filesystem') if not self.is_playback(): @@ -64,14 +64,11 @@ def _setUp(self, account_name, account_key): except ResourceExistsError: pass - def tearDown(self): - if not self.is_playback(): - try: - self.dsc.delete_file_system(self.file_system_name) - except: - pass - # --Helpers----------------------------------------------------------------- + def _get_file_system_reference(self, prefix=TEST_FILE_SYSTEM_PREFIX): + file_system_name = self.get_resource_name(prefix) + return file_system_name + def _get_directory_reference(self, prefix=TEST_DIRECTORY_PREFIX): directory_name = 
self.get_resource_name(prefix) return directory_name @@ -1672,7 +1669,7 @@ def test_bad_audience_file_client(self, **kwargs): file_client.file_system_name + '/', '/' + file_client.path_name, credential=token_credential, - audience=f'https://badaudience.blob.core.windows.net/' + audience='https://badaudience.blob.core.windows.net/' ) # Will not raise ClientAuthenticationError despite bad audience due to Bearer Challenge diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py index cd00a5d081d1..1a0d494fb508 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py @@ -12,6 +12,12 @@ from urllib.parse import quote, urlencode import pytest + +from devtools_testutils.aio import recorded_by_proxy_async +from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase +from settings.testcase import DataLakePreparer +from test_helpers_async import AsyncStream, MockStorageTransport, ProgressTracker + from azure.core import MatchConditions from azure.core.credentials import AzureSasCredential from azure.core.exceptions import ( @@ -19,7 +25,7 @@ HttpResponseError, ResourceExistsError, ResourceModifiedError, - ResourceNotFoundError + ResourceNotFoundError, ) from azure.storage.filedatalake import ( AccountSasPermissions, @@ -30,19 +36,20 @@ generate_account_sas, generate_file_sas, generate_file_system_sas, - ResourceTypes + ResourceTypes, +) +from azure.storage.filedatalake.aio import ( + DataLakeDirectoryClient, + DataLakeFileClient, + DataLakeServiceClient, + FileSystemClient, ) -from azure.storage.filedatalake.aio import DataLakeDirectoryClient, DataLakeFileClient, DataLakeServiceClient, FileSystemClient -from devtools_testutils.aio import recorded_by_proxy_async -from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase -from settings.testcase import DataLakePreparer -from 
test_helpers_async import AsyncStream, MockStorageTransport, ProgressTracker -# ------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ TEST_DIRECTORY_PREFIX = 'directory' TEST_FILE_PREFIX = 'file' - +TEST_FILE_SYSTEM_PREFIX = 'filesystem' # ------------------------------------------------------------------------------ @@ -50,9 +57,7 @@ class TestFileAsync(AsyncStorageRecordedTestCase): async def _setUp(self, account_name, account_key): url = self.account_url(account_name, 'dfs') self.dsc = DataLakeServiceClient(url, credential=account_key.secret) - self.config = self.dsc._config - self.file_system_name = self.get_resource_name('filesystem') if not self.is_playback(): @@ -63,16 +68,11 @@ async def _setUp(self, account_name, account_key): except ResourceExistsError: pass - def tearDown(self): - if not self.is_playback(): - try: - loop = asyncio.get_event_loop() - loop.run_until_complete(self.dsc.delete_file_system(self.file_system_name)) - loop.run_until_complete(self.dsc.__aexit__()) - except: - pass - # --Helpers----------------------------------------------------------------- + def _get_file_system_reference(self, prefix=TEST_FILE_SYSTEM_PREFIX): + file_system_name = self.get_resource_name(prefix) + return file_system_name + def _get_directory_reference(self, prefix=TEST_DIRECTORY_PREFIX): directory_name = self.get_resource_name(prefix) return directory_name @@ -129,7 +129,7 @@ async def test_create_file(self, **kwargs): # Assert assert response is not None - + @DataLakePreparer() @recorded_by_proxy_async async def test_create_file_owner_group_acl(self, **kwargs): @@ -662,7 +662,8 @@ async def test_upload_data_to_existing_file_with_content_settings(self, **kwargs content_language='spanish', content_disposition='inline') - await file_client.upload_data(data, content_settings=content_settings, etag=etag, match_condition=MatchConditions.IfNotModified) + await 
file_client.upload_data( + data, content_settings=content_settings, etag=etag, match_condition=MatchConditions.IfNotModified) downloaded_data = await (await file_client.download_file()).readall() properties = await file_client.get_file_properties() @@ -692,7 +693,14 @@ async def test_upload_data_to_existing_file_with_permissions_and_umask(self, **k # to override the existing file data = self.get_random_bytes(100) - await file_client.upload_data(data, overwrite=True, permissions='0777', umask="0000", etag=etag, match_condition=MatchConditions.IfNotModified) + await file_client.upload_data( + data, + overwrite=True, + permissions='0777', + umask="0000", + etag=etag, + match_condition=MatchConditions.IfNotModified + ) downloaded_data = await (await file_client.download_file()).readall() prop = await file_client.get_access_control() @@ -803,7 +811,8 @@ async def test_read_file_with_user_delegation_key(self, **kwargs): # Get user delegation key token_credential = self.get_credential(DataLakeServiceClient, is_async=True) - service_client = DataLakeServiceClient(self.account_url(datalake_storage_account_name, 'dfs'), credential=token_credential) + service_client = DataLakeServiceClient( + self.account_url(datalake_storage_account_name, 'dfs'), credential=token_credential) user_delegation_key = await service_client.get_user_delegation_key(datetime.utcnow(), datetime.utcnow() + timedelta(hours=1)) @@ -912,7 +921,12 @@ async def test_account_sas_raises_if_sas_already_in_uri(self, **kwargs): await self._setUp(datalake_storage_account_name, datalake_storage_account_key) with pytest.raises(ValueError): - DataLakeFileClient(self.dsc.url + "?sig=foo", self.file_system_name, "foo", credential=AzureSasCredential("?foo=bar")) + DataLakeFileClient( + self.dsc.url + "?sig=foo", + self.file_system_name, + "foo", + credential=AzureSasCredential("?foo=bar") + ) @pytest.mark.live_test_only @DataLakePreparer() @@ -1178,7 +1192,8 @@ async def test_set_expiry(self, **kwargs): 
content_language='spanish', content_disposition='inline') expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(hours=1)) - file_client = await directory_client.create_file("newfile", metadata=metadata, content_settings=content_settings) + file_client = await directory_client.create_file( + "newfile", metadata=metadata, content_settings=content_settings) # Act / Assert await file_client.set_file_expiry("Absolute", expires_on=expiry_time) @@ -1267,23 +1282,25 @@ async def test_rename_file_with_file_sas(self, **kwargs): await self._setUp(datalake_storage_account_name, datalake_storage_account_key) # SAS URL is calculated from storage key, so this test runs live only - token = generate_file_sas(self.dsc.account_name, - self.file_system_name, - None, - "oldfile", - datalake_storage_account_key.secret, - permission=FileSasPermissions(read=True, create=True, write=True, delete=True, move=True), - expiry=datetime.utcnow() + timedelta(hours=1), - ) - - new_token = generate_file_sas(self.dsc.account_name, - self.file_system_name, - None, - "newname", - datalake_storage_account_key.secret, - permission=FileSasPermissions(read=True, create=True, write=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - ) + token = generate_file_sas( + self.dsc.account_name, + self.file_system_name, + None, + "oldfile", + datalake_storage_account_key.secret, + permission=FileSasPermissions(read=True, create=True, write=True, delete=True, move=True), + expiry=datetime.utcnow() + timedelta(hours=1), + ) + + new_token = generate_file_sas( + self.dsc.account_name, + self.file_system_name, + None, + "newname", + datalake_storage_account_key.secret, + permission=FileSasPermissions(read=True, create=True, write=True, delete=True), + expiry=datetime.utcnow() + timedelta(hours=1), + ) # read the created file which is under root directory file_client = DataLakeFileClient(self.dsc.url, self.file_system_name, "oldfile", credential=token) @@ 
-1570,7 +1587,7 @@ async def test_bad_audience_file_client(self, **kwargs): file_client.file_system_name + '/', '/' + file_client.path_name, credential=token_credential, - audience=f'https://badaudience.blob.core.windows.net/' + audience='https://badaudience.blob.core.windows.net/' ) # Will not raise ClientAuthenticationError despite bad audience due to Bearer Challenge @@ -1671,7 +1688,8 @@ async def test_download_file_decompress(self, **kwargs): # Arrange await self._setUp(datalake_storage_account_name, datalake_storage_account_key) file_client = await self._create_file_and_return_client() - compressed_data = b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcaH\xcd\xc9\xc9WH+\xca\xcfUH\xaf\xca,\x00\x00\x00\x00\xff\xff\x03\x00d\xaa\x8e\xb5\x0f\x00\x00\x00' + compressed_data = (b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcaH\xcd\xc9\xc9WH+\xca\xcfUH' + b'\xaf\xca,\x00\x00\x00\x00\xff\xff\x03\x00d\xaa\x8e\xb5\x0f\x00\x00\x00') decompressed_data = b"hello from gzip" content_settings = ContentSettings(content_encoding='gzip') @@ -1710,7 +1728,8 @@ async def test_download_file_no_decompress_chunks(self, **kwargs): ) await file_client.create_file() - compressed_data = b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcaH\xcd\xc9\xc9WH+\xca\xcfUH\xaf\xca,\x00\x00\x00\x00\xff\xff\x03\x00d\xaa\x8e\xb5\x0f\x00\x00\x00' + compressed_data = (b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcaH\xcd\xc9\xc9WH+\xca\xcfUH' + b'\xaf\xca,\x00\x00\x00\x00\xff\xff\x03\x00d\xaa\x8e\xb5\x0f\x00\x00\x00') content_settings = ContentSettings(content_encoding='gzip') # Act / Assert @@ -1730,7 +1749,8 @@ async def test_datalake_dynamic_user_delegation_sas(self, **kwargs): datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") token_credential = self.get_credential(DataLakeServiceClient, is_async=True) - dsc = DataLakeServiceClient(self.account_url(datalake_storage_account_name, "dfs"), credential=token_credential) + dsc = DataLakeServiceClient( + 
self.account_url(datalake_storage_account_name, "dfs"), credential=token_credential) fs_name, file_name = self.get_resource_name('filesystem'), self.get_resource_name('file') fs = await dsc.create_file_system(fs_name) file = await fs.create_file(file_name) @@ -1823,7 +1843,8 @@ async def test_data_lake_tags(self, **kwargs): with pytest.raises(ResourceModifiedError): await file_client.get_tags(if_unmodified_since=early) with pytest.raises(ResourceModifiedError): - await file_client.set_tags(first_tags, etag=first_resp['etag'], match_condition=MatchConditions.IfNotModified) + await file_client.set_tags( + first_tags, etag=first_resp['etag'], match_condition=MatchConditions.IfNotModified) await file_client.set_tags(first_tags, if_modified_since=early) tags = await file_client.get_tags(if_modified_since=early) diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py index b82cba4cfe35..5d246b09b0b3 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py @@ -3,14 +3,19 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -import jwt import unittest from datetime import datetime, timedelta from time import sleep +import jwt import pytest + +from devtools_testutils import recorded_by_proxy +from devtools_testutils.storage import StorageRecordedTestCase +from settings.testcase import DataLakePreparer + from azure.core import MatchConditions -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceNotFoundError +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError from azure.storage.filedatalake import ( AccessPolicy, AccountSasPermissions, @@ -25,13 +30,10 @@ generate_file_sas, generate_file_system_sas, PublicAccess, - ResourceTypes + ResourceTypes, ) from azure.storage.filedatalake._models import FileSasPermissions -from devtools_testutils import recorded_by_proxy -from devtools_testutils.storage import StorageRecordedTestCase -from settings.testcase import DataLakePreparer # ------------------------------------------------------------------------------ TEST_FILE_SYSTEM_PREFIX = 'filesystem' @@ -62,7 +64,7 @@ def _get_file_system_reference(self, prefix=TEST_FILE_SYSTEM_PREFIX): def _create_file_system(self, file_system_prefix=TEST_FILE_SYSTEM_PREFIX): try: return self.dsc.create_file_system(self._get_file_system_reference(prefix=file_system_prefix)) - except: + except ResourceExistsError: pass def _is_almost_equal(self, first, second, delta): @@ -74,7 +76,6 @@ def _is_almost_equal(self, first, second, delta): return True return False - # --Test cases for file system --------------------------------------------- @DataLakePreparer() @@ -1145,7 +1146,7 @@ def test_bad_audience_service_client(self, **kwargs): fsc = FileSystemClient( url, file_system_name, credential=token_credential, - audience=f'https://badaudience.blob.core.windows.net/' + audience='https://badaudience.blob.core.windows.net/' ) # Will not raise ClientAuthenticationError 
despite bad audience due to Bearer Challenge @@ -1212,7 +1213,7 @@ def test_get_user_delegation_sas(self, **kwargs): @pytest.mark.live_test_only @DataLakePreparer() - def test_datalake_cross_tenant_delegation_sas(self, **kwargs): + def test_datalake_cross_tenant_delegation_sas(self, **kwargs): # pylint: disable=too-many-locals datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") token_credential = self.get_credential(DataLakeServiceClient) @@ -1230,7 +1231,7 @@ def test_datalake_cross_tenant_delegation_sas(self, **kwargs): start = datetime.utcnow() expiry = datetime.utcnow() + timedelta(hours=1) token = token_credential.get_token("https://storage.azure.com/.default") - decoded = jwt.decode(token.token, options={"verify_signature": False}) + decoded = jwt.decode(token.token, options={"verify_signature": False}) # pylint: disable=no-member user_delegation_oid = decoded.get("oid") delegated_user_tid = decoded.get("tid") user_delegation_key = dsc.get_user_delegation_key( diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py index cdd419b30989..adbbd77d55ec 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py @@ -3,16 +3,20 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -import asyncio -import jwt import unittest import uuid from datetime import datetime, timedelta from time import sleep +import jwt import pytest + +from devtools_testutils.aio import recorded_by_proxy_async +from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase +from settings.testcase import DataLakePreparer + from azure.core import MatchConditions -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceNotFoundError +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError from azure.storage.filedatalake import ( AccessPolicy, AccountSasPermissions, @@ -24,14 +28,16 @@ generate_file_sas, generate_file_system_sas, PublicAccess, - ResourceTypes + ResourceTypes, ) -from azure.storage.filedatalake.aio import DataLakeDirectoryClient, DataLakeFileClient, DataLakeServiceClient, FileSystemClient from azure.storage.filedatalake._models import FileSasPermissions +from azure.storage.filedatalake.aio import ( + DataLakeDirectoryClient, + DataLakeFileClient, + DataLakeServiceClient, + FileSystemClient, +) -from devtools_testutils.aio import recorded_by_proxy_async -from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase -from settings.testcase import DataLakePreparer # ------------------------------------------------------------------------------ TEST_FILE_SYSTEM_PREFIX = 'filesystem' @@ -45,16 +51,6 @@ def _setUp(self, account_name, account_key): self.config = self.dsc._config self.test_file_systems = [] - def tearDown(self): - if not self.is_playback(): - loop = asyncio.get_event_loop() - try: - for file_system in self.test_file_systems: - loop.run_until_complete(self.dsc.delete_file_system(file_system)) - loop.run_until_complete(self.fsc.__aexit__()) - except: - pass - # --Helpers----------------------------------------------------------------- def _get_file_system_reference(self, 
prefix=TEST_FILE_SYSTEM_PREFIX): file_system_name = self.get_resource_name(prefix) @@ -62,7 +58,12 @@ def _get_file_system_reference(self, prefix=TEST_FILE_SYSTEM_PREFIX): return file_system_name async def _create_file_system(self, file_system_prefix=TEST_FILE_SYSTEM_PREFIX): - return await self.dsc.create_file_system(self._get_file_system_reference(prefix=file_system_prefix)) + try: + return await self.dsc.create_file_system( + self._get_file_system_reference(prefix=file_system_prefix) + ) + except ResourceExistsError: + pass async def _to_list(self, async_iterator): result = [] @@ -410,7 +411,7 @@ async def test_delete_file_system_with_existing_file_system_async(self, **kwargs self._setUp(datalake_storage_account_name, datalake_storage_account_key) # Arrange - file_system = await self._create_file_system() + file_system = await self._create_file_system() # Act deleted = await file_system.delete_file_system() @@ -1174,8 +1175,7 @@ async def test_list_paths_using_file_sys_delegation_sas_async(self, **kwargs): paths = [] async for path in sas_directory_client.get_paths(): paths.append(path) - - assert 0 == 0 + assert paths == [] @DataLakePreparer() @recorded_by_proxy_async @@ -1277,7 +1277,7 @@ async def test_bad_audience_service_client(self, **kwargs): fsc = FileSystemClient( url, file_system_name, credential=token_credential, - audience=f'https://badaudience.blob.core.windows.net/' + audience='https://badaudience.blob.core.windows.net/' ) # Will not raise ClientAuthenticationError despite bad audience due to Bearer Challenge @@ -1344,7 +1344,7 @@ async def test_get_user_delegation_sas(self, **kwargs): @pytest.mark.live_test_only @DataLakePreparer() - async def test_datalake_cross_tenant_delegation_sas(self, **kwargs): + async def test_datalake_cross_tenant_delegation_sas(self, **kwargs): # pylint: disable=too-many-locals datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") token_credential = self.get_credential(DataLakeServiceClient, 
is_async=True) @@ -1362,7 +1362,7 @@ async def test_datalake_cross_tenant_delegation_sas(self, **kwargs): start = datetime.utcnow() expiry = datetime.utcnow() + timedelta(hours=1) token = await token_credential.get_token("https://storage.azure.com/.default") - decoded = jwt.decode(token.token, options={"verify_signature": False}) + decoded = jwt.decode(token.token, options={"verify_signature": False}) # pylint: disable=no-member user_delegation_oid = decoded.get("oid") delegated_user_tid = decoded.get("tid") user_delegation_key = await dsc.get_user_delegation_key( diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_helpers.py b/sdk/storage/azure-storage-file-datalake/tests/test_helpers.py index 377e1081c9e4..6549cdbca590 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_helpers.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_helpers.py @@ -5,14 +5,15 @@ # -------------------------------------------------------------------------- from typing import Any, Dict, Optional -from typing_extensions import Self from urllib.parse import urlparse -from azure.core.pipeline.transport import HttpTransport, RequestsTransportResponse -from azure.core.rest import HttpRequest from requests import Response +from typing_extensions import Self from urllib3 import HTTPResponse +from azure.core.pipeline.transport import HttpTransport, RequestsTransportResponse # pylint: disable=no-name-in-module +from azure.core.rest import HttpRequest + class ProgressTracker: def __init__(self, total: int, step: int): diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_helpers_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_helpers_async.py index 964a63da4016..ab4e99ea7652 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_helpers_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_helpers_async.py @@ -8,11 +8,12 @@ from typing import Any, Dict, Optional from urllib.parse import urlparse -from 
azure.core.pipeline.transport import AioHttpTransportResponse, AsyncHttpTransport -from azure.core.rest import HttpRequest from aiohttp import ClientResponse -from aiohttp.streams import StreamReader from aiohttp.client_proto import ResponseHandler +from aiohttp.streams import StreamReader + +from azure.core.pipeline.transport import AioHttpTransportResponse, AsyncHttpTransport # pylint: disable=no-name-in-module +from azure.core.rest import HttpRequest class ProgressTracker: diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_large_file.py b/sdk/storage/azure-storage-file-datalake/tests/test_large_file.py index f93a4c40d67d..92ad41d49281 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_large_file.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_large_file.py @@ -3,19 +3,22 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- + import platform import re import unittest from os import urandom import pytest + +from devtools_testutils.storage import StorageRecordedTestCase +from settings.testcase import DataLakePreparer + from azure.core.exceptions import ResourceExistsError from azure.core.pipeline.policies import HTTPPolicy from azure.storage.blob._shared.base_client import _format_shared_key_credential from azure.storage.filedatalake import DataLakeServiceClient -from devtools_testutils.storage import StorageRecordedTestCase -from settings.testcase import DataLakePreparer # ------------------------------------------------------------------------------ TEST_DIRECTORY_PREFIX = 'directory' @@ -31,10 +34,12 @@ def _setUp(self, account_name, account_key): self.payload_dropping_policy = PayloadDroppingPolicy() credential_policy = _format_shared_key_credential(account_name, account_key.secret) - self.dsc = DataLakeServiceClient(url, - credential=account_key.secret, - logging_enable=True, - 
_additional_pipeline_policies=[self.payload_dropping_policy, credential_policy]) + self.dsc = DataLakeServiceClient( + url, + credential=account_key.secret, + logging_enable=True, + _additional_pipeline_policies=[self.payload_dropping_policy, credential_policy] + ) self.config = self.dsc._config self.file_system_name = self.get_resource_name('filesystem') @@ -46,15 +51,6 @@ def _setUp(self, account_name, account_key): except ResourceExistsError: pass - def tearDown(self): - if not self.is_playback(): - try: - self.dsc.delete_file_system(self.file_system_name) - except: - pass - - return super(TestLargeFile, self).tearDown() - @pytest.mark.live_test_only @DataLakePreparer() def test_append_large_stream_without_network(self, **kwargs): @@ -80,7 +76,10 @@ def test_append_large_stream_without_network(self, **kwargs): assert self.payload_dropping_policy.append_counter == 1 assert self.payload_dropping_policy.append_sizes[0] == LARGEST_BLOCK_SIZE - @pytest.mark.skipif(platform.python_implementation() == "PyPy", reason="Test failing on Pypy3 Linux, skip to investigate") + @pytest.mark.skipif( + platform.python_implementation() == "PyPy", + reason="Test failing on Pypy3 Linux, skip to investigate" + ) @pytest.mark.live_test_only @DataLakePreparer() def test_upload_large_stream_without_network(self, **kwargs): @@ -146,7 +145,8 @@ def send(self, request): # type: (PipelineRequest) -> PipelineResponse if _is_append_request(request): if request.http_request.body: position = self.append_counter*len(self.dummy_body) - request.http_request.url = re.sub(r'position=\d+', "position=" + str(position), request.http_request.url) + request.http_request.url = re.sub( + r'position=\d+', "position=" + str(position), request.http_request.url) self.append_sizes.append(_get_body_length(request)) replacement = self.dummy_body request.http_request.body = replacement @@ -154,7 +154,8 @@ def send(self, request): # type: (PipelineRequest) -> PipelineResponse self.append_counter = 
self.append_counter + 1 if _is_flush_request(request): position = self.append_counter * len(self.dummy_body) - request.http_request.url = re.sub(r'position=\d+', "position=" + str(position), request.http_request.url) + request.http_request.url = re.sub( + r'position=\d+', "position=" + str(position), request.http_request.url) return self.next.send(request) diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_large_file_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_large_file_async.py index ed3155307704..e630e4beec1d 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_large_file_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_large_file_async.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -import asyncio + import platform import re import unittest @@ -11,13 +11,15 @@ from os import urandom import pytest + +from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase +from settings.testcase import DataLakePreparer + from azure.core.exceptions import ResourceExistsError from azure.core.pipeline.policies import SansIOHTTPPolicy from azure.storage.blob._shared.base_client import _format_shared_key_credential from azure.storage.filedatalake.aio import DataLakeServiceClient -from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase -from settings.testcase import DataLakePreparer # ------------------------------------------------------------------------------ TEST_DIRECTORY_PREFIX = 'directory' @@ -32,9 +34,11 @@ async def _setUp(self, account_name, account_key): url = self.account_url(account_name, 'dfs') self.payload_dropping_policy = PayloadDroppingPolicy() credential_policy = _format_shared_key_credential(account_name, account_key.secret) - self.dsc = DataLakeServiceClient(url, - credential=account_key.secret, - 
_additional_pipeline_policies=[self.payload_dropping_policy, credential_policy]) + self.dsc = DataLakeServiceClient( + url, + credential=account_key.secret, + _additional_pipeline_policies=[self.payload_dropping_policy, credential_policy] + ) self.config = self.dsc._config @@ -48,17 +52,6 @@ async def _setUp(self, account_name, account_key): except ResourceExistsError: pass - def tearDown(self): - if not self.is_playback(): - try: - loop = asyncio.get_event_loop() - loop.run_until_complete(self.dsc.delete_file_system(self.file_system_name)) - loop.run_until_complete(self.dsc.__aexit__()) - except: - pass - - return super(TestLargeFileAsync, self).tearDown() - # --Helpers----------------------------------------------------------------- def _get_directory_reference(self, prefix=TEST_DIRECTORY_PREFIX): directory_name = self.get_resource_name(prefix) @@ -90,7 +83,10 @@ async def test_append_large_stream_without_network(self, **kwargs): assert self.payload_dropping_policy.append_counter == 1 assert self.payload_dropping_policy.append_sizes[0] == LARGEST_BLOCK_SIZE - @pytest.mark.skipif(platform.python_implementation() == "PyPy", reason="Test failing on Pypy3 Linux, skip to investigate") + @pytest.mark.skipif( + platform.python_implementation() == "PyPy", + reason="Test failing on Pypy3 Linux, skip to investigate" + ) @pytest.mark.live_test_only @DataLakePreparer() async def test_upload_large_stream_without_network(self, **kwargs): @@ -159,7 +155,8 @@ def on_request(self, request): # type: (PipelineRequest) -> Union[None, Awaitab if _is_append_request(request): if request.http_request.body: position = self.append_counter*len(self.dummy_body) - request.http_request.url = re.sub(r'position=\d+', "position=" + str(position), request.http_request.url) + request.http_request.url = re.sub( + r'position=\d+', "position=" + str(position), request.http_request.url) self.append_sizes.append(_get_body_length(request)) replacement = self.dummy_body request.http_request.body = 
replacement @@ -167,7 +164,8 @@ def on_request(self, request): # type: (PipelineRequest) -> Union[None, Awaitab self.append_counter = self.append_counter + 1 elif _is_flush_request(request): position = self.append_counter * len(self.dummy_body) - request.http_request.url = re.sub(r'position=\d+', "position=" + str(position), request.http_request.url) + request.http_request.url = re.sub( + r'position=\d+', "position=" + str(position), request.http_request.url) def _is_append_request(request): From 0de5be5582c863405acd2650e02cc5e9fa7cef02 Mon Sep 17 00:00:00 2001 From: Peter Wu Date: Sat, 9 May 2026 11:05:51 -0400 Subject: [PATCH 2/4] line too long test query --- .../tests/test_quick_query.py | 244 ++++++++++-------- 1 file changed, 130 insertions(+), 114 deletions(-) diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_quick_query.py b/sdk/storage/azure-storage-file-datalake/tests/test_quick_query.py index 4d40d5c4cdd3..f78eed5bce58 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_quick_query.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_quick_query.py @@ -21,95 +21,101 @@ # ------------------------------------------------------------------------------ from azure.storage.filedatalake import DataLakeServiceClient -CSV_DATA = b'Service,Package,Version,RepoPath,MissingDocs\r\nApp Configuration,' \ - b'azure-data-appconfiguration,1,appconfiguration,FALSE\r\nEvent Hubs' \ - b'\r\nEvent Hubs - Azure Storage CheckpointStore,' \ - b'azure-messaging-eventhubs-checkpointstore-blob,1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,' \ - b'1.1.0-beta.1,identity,FALSE\r\nKey Vault - Certificates,azure-security-keyvault-certificates,' \ - b'4.0.0,keyvault,FALSE\r\nKey Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,' \ - b'FALSE\r\nKey Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n' \ - b'Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\nStorage - Blobs Batch,' \ - 
b'azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\nStorage - Blobs Cryptography,' \ - b'azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\nStorage - File Shares,' \ - b'azure-storage-file-share,12.2.0,storage,FALSE\r\nStorage - Queues,' \ - b'azure-storage-queue,12.3.0,storage,FALSE\r\nText Analytics,' \ - b'azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\nTracing,' \ - b'azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\nService,Package,Version,RepoPath,' \ - b'MissingDocs\r\nApp Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n' \ - b'Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n' \ - b'Event Hubs - Azure Storage CheckpointStore,azure-messaging-eventhubs-checkpointstore-blob,' \ - b'1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,1.1.0-beta.1,identity,FALSE\r\n' \ - b'Key Vault - Certificates,azure-security-keyvault-certificates,4.0.0,keyvault,FALSE\r\n' \ - b'Key Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,FALSE\r\n' \ - b'Key Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n' \ - b'Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\n' \ - b'Storage - Blobs Batch,azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\n' \ - b'Storage - Blobs Cryptography,azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\n' \ - b'Storage - File Shares,azure-storage-file-share,12.2.0,storage,FALSE\r\n' \ - b'Storage - Queues,azure-storage-queue,12.3.0,storage,FALSE\r\n' \ - b'Text Analytics,azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\n' \ - b'Tracing,azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\n' \ - b'Service,Package,Version,RepoPath,MissingDocs\r\n' \ - b'App Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n' \ - b'Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n' - -DATALAKE_CSV_DATA = b'DataLakeStorage,Package,Version,RepoPath,MissingDocs\r\nApp Configuration,' \ - 
b'azure-data-appconfiguration,1,appconfiguration,FALSE\r\nEvent Hubs' \ - b'\r\nEvent Hubs - Azure Storage CheckpointStore,' \ - b'azure-messaging-eventhubs-checkpointstore-blob,1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,' \ - b'1.1.0-beta.1,identity,FALSE\r\nKey Vault - Certificates,azure-security-keyvault-certificates,' \ - b'4.0.0,keyvault,FALSE\r\nKey Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,' \ - b'FALSE\r\nKey Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n' \ - b'Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\nStorage - Blobs Batch,' \ - b'azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\nStorage - Blobs Cryptography,' \ - b'azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\nStorage - File Shares,' \ - b'azure-storage-file-share,12.2.0,storage,FALSE\r\nStorage - Queues,' \ - b'azure-storage-queue,12.3.0,storage,FALSE\r\nText Analytics,' \ - b'azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\nTracing,' \ - b'azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\nService,Package,Version,RepoPath,' \ - b'MissingDocs\r\nApp Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n' \ - b'Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n' \ - b'Event Hubs - Azure Storage CheckpointStore,azure-messaging-eventhubs-checkpointstore-blob,' \ - b'1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,1.1.0-beta.1,identity,FALSE\r\n' \ - b'Key Vault - Certificates,azure-security-keyvault-certificates,4.0.0,keyvault,FALSE\r\n' \ - b'Key Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,FALSE\r\n' \ - b'Key Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n' \ - b'Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\n' \ - b'Storage - Blobs Batch,azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\n' \ - b'Storage - Blobs Cryptography,azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\n' \ - b'Storage - File 
Shares,azure-storage-file-share,12.2.0,storage,FALSE\r\n' \ - b'Storage - Queues,azure-storage-queue,12.3.0,storage,FALSE\r\n' \ - b'Text Analytics,azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\n' \ - b'Tracing,azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\n' \ - b'Service,Package,Version,RepoPath,MissingDocs\r\n' \ - b'App Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n' \ - b'Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n' - -CONVERTED_CSV_DATA = b"Service;Package;Version;RepoPath;MissingDocs.App Configuration;azure-data-appconfiguration;" \ - b"1;appconfiguration;FALSE.Event Hubs.Event Hubs - Azure Storage CheckpointStore;azure-messaging-eventhubs-checkpointstore-blob;" \ - b"'1.0.1';eventhubs;FALSE.Identity;azure-identity;'1.1.0-beta.1';identity;FALSE.Key Vault - Certificates;" \ - b"azure-security-keyvault-certificates;'4.0.0';keyvault;FALSE.Key Vault - Keys;azure-security-keyvault-keys;" \ - b"'4.2.0-beta.1';keyvault;FALSE.Key Vault - Secrets;azure-security-keyvault-secrets;'4.1.0';keyvault;" \ - b"FALSE.Storage - Blobs;azure-storage-blob;'12.4.0';storage;FALSE.Storage - Blobs Batch;" \ - b"azure-storage-blob-batch;'12.4.0-beta.1';storage;FALSE.Storage - Blobs Cryptography;" \ - b"azure-storage-blob-cryptography;'12.4.0';storage;FALSE.Storage - File Shares;azure-storage-file-share;" \ - b"'12.2.0';storage;FALSE.Storage - Queues;azure-storage-queue;'12.3.0';storage;FALSE.Text Analytics;" \ - b"azure-ai-textanalytics;'1.0.0-beta.2';textanalytics;FALSE.Tracing;azure-core-tracing-opentelemetry;" \ - b"'1.0.0-beta.2';core;FALSE.Service;Package;Version;RepoPath;MissingDocs.App Configuration;" \ - b"azure-data-appconfiguration;'1.0.1';appconfiguration;FALSE.Event Hubs;azure-messaging-eventhubs;" \ - b"'5.0.1';eventhubs;FALSE.Event Hubs - Azure Storage CheckpointStore;azure-messaging-eventhubs-checkpointstore-blob;" \ - b"'1.0.1';eventhubs;FALSE.Identity;azure-identity;'1.1.0-beta.1';identity;" \ - 
b"FALSE.Key Vault - Certificates;azure-security-keyvault-certificates;'4.0.0';" \ - b"keyvault;FALSE.Key Vault - Keys;azure-security-keyvault-keys;'4.2.0-beta.1';keyvault;FALSE.Key Vault - Secrets;" \ - b"azure-security-keyvault-secrets;'4.1.0';keyvault;FALSE.Storage - Blobs;azure-storage-blob;'12.4.0';" \ - b"storage;FALSE.Storage - Blobs Batch;azure-storage-blob-batch;'12.4.0-beta.1';storage;FALSE.Storage - Blobs Cryptography;" \ - b"azure-storage-blob-cryptography;'12.4.0';storage;FALSE.Storage - File Shares;azure-storage-file-share;" \ - b"'12.2.0';storage;FALSE.Storage - Queues;azure-storage-queue;'12.3.0';storage;FALSE.Text Analytics;" \ - b"azure-ai-textanalytics;'1.0.0-beta.2';textanalytics;FALSE.Tracing;azure-core-tracing-opentelemetry;" \ - b"'1.0.0-beta.2';core;FALSE.Service;Package;Version;RepoPath;MissingDocs.App Configuration;" \ - b"azure-data-appconfiguration;'1.0.1';appconfiguration;FALSE.Event Hubs;azure-messaging-eventhubs;" \ - b"'5.0.1';eventhubs;FALSE." - +CSV_DATA = ( + b'Service,Package,Version,RepoPath,MissingDocs\r\nApp Configuration,' + b'azure-data-appconfiguration,1,appconfiguration,FALSE\r\nEvent Hubs' + b'\r\nEvent Hubs - Azure Storage CheckpointStore,' + b'azure-messaging-eventhubs-checkpointstore-blob,1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,' + b'1.1.0-beta.1,identity,FALSE\r\nKey Vault - Certificates,azure-security-keyvault-certificates,' + b'4.0.0,keyvault,FALSE\r\nKey Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,' + b'FALSE\r\nKey Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n' + b'Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\nStorage - Blobs Batch,' + b'azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\nStorage - Blobs Cryptography,' + b'azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\nStorage - File Shares,' + b'azure-storage-file-share,12.2.0,storage,FALSE\r\nStorage - Queues,' + b'azure-storage-queue,12.3.0,storage,FALSE\r\nText Analytics,' + 
b'azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\nTracing,' + b'azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\nService,Package,Version,RepoPath,' + b'MissingDocs\r\nApp Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n' + b'Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n' + b'Event Hubs - Azure Storage CheckpointStore,azure-messaging-eventhubs-checkpointstore-blob,' + b'1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,1.1.0-beta.1,identity,FALSE\r\n' + b'Key Vault - Certificates,azure-security-keyvault-certificates,4.0.0,keyvault,FALSE\r\n' + b'Key Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,FALSE\r\n' + b'Key Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n' + b'Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\n' + b'Storage - Blobs Batch,azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\n' + b'Storage - Blobs Cryptography,azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\n' + b'Storage - File Shares,azure-storage-file-share,12.2.0,storage,FALSE\r\n' + b'Storage - Queues,azure-storage-queue,12.3.0,storage,FALSE\r\n' + b'Text Analytics,azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\n' + b'Tracing,azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\n' + b'Service,Package,Version,RepoPath,MissingDocs\r\n' + b'App Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n' + b'Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n' +) +DATALAKE_CSV_DATA = ( + b'DataLakeStorage,Package,Version,RepoPath,MissingDocs\r\nApp Configuration,' + b'azure-data-appconfiguration,1,appconfiguration,FALSE\r\nEvent Hubs' + b'\r\nEvent Hubs - Azure Storage CheckpointStore,' + b'azure-messaging-eventhubs-checkpointstore-blob,1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,' + b'1.1.0-beta.1,identity,FALSE\r\nKey Vault - Certificates,azure-security-keyvault-certificates,' + b'4.0.0,keyvault,FALSE\r\nKey Vault - 
Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,' + b'FALSE\r\nKey Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n' + b'Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\nStorage - Blobs Batch,' + b'azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\nStorage - Blobs Cryptography,' + b'azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\nStorage - File Shares,' + b'azure-storage-file-share,12.2.0,storage,FALSE\r\nStorage - Queues,' + b'azure-storage-queue,12.3.0,storage,FALSE\r\nText Analytics,' + b'azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\nTracing,' + b'azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\nService,Package,Version,RepoPath,' + b'MissingDocs\r\nApp Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n' + b'Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n' + b'Event Hubs - Azure Storage CheckpointStore,azure-messaging-eventhubs-checkpointstore-blob,' + b'1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,1.1.0-beta.1,identity,FALSE\r\n' + b'Key Vault - Certificates,azure-security-keyvault-certificates,4.0.0,keyvault,FALSE\r\n' + b'Key Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,FALSE\r\n' + b'Key Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n' + b'Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\n' + b'Storage - Blobs Batch,azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\n' + b'Storage - Blobs Cryptography,azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\n' + b'Storage - File Shares,azure-storage-file-share,12.2.0,storage,FALSE\r\n' + b'Storage - Queues,azure-storage-queue,12.3.0,storage,FALSE\r\n' + b'Text Analytics,azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\n' + b'Tracing,azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\n' + b'Service,Package,Version,RepoPath,MissingDocs\r\n' + b'App 
Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n' + b'Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n' +) +CONVERTED_CSV_DATA = ( + b"Service;Package;Version;RepoPath;MissingDocs.App Configuration;azure-data-appconfiguration;" + b"1;appconfiguration;FALSE.Event Hubs.Event Hubs - Azure Storage CheckpointStore;" + b"azure-messaging-eventhubs-checkpointstore-blob;" + b"'1.0.1';eventhubs;FALSE.Identity;azure-identity;'1.1.0-beta.1';identity;FALSE.Key Vault - Certificates;" + b"azure-security-keyvault-certificates;'4.0.0';keyvault;FALSE.Key Vault - Keys;azure-security-keyvault-keys;" + b"'4.2.0-beta.1';keyvault;FALSE.Key Vault - Secrets;azure-security-keyvault-secrets;'4.1.0';keyvault;" + b"FALSE.Storage - Blobs;azure-storage-blob;'12.4.0';storage;FALSE.Storage - Blobs Batch;" + b"azure-storage-blob-batch;'12.4.0-beta.1';storage;FALSE.Storage - Blobs Cryptography;" + b"azure-storage-blob-cryptography;'12.4.0';storage;FALSE.Storage - File Shares;azure-storage-file-share;" + b"'12.2.0';storage;FALSE.Storage - Queues;azure-storage-queue;'12.3.0';storage;FALSE.Text Analytics;" + b"azure-ai-textanalytics;'1.0.0-beta.2';textanalytics;FALSE.Tracing;azure-core-tracing-opentelemetry;" + b"'1.0.0-beta.2';core;FALSE.Service;Package;Version;RepoPath;MissingDocs.App Configuration;" + b"azure-data-appconfiguration;'1.0.1';appconfiguration;FALSE.Event Hubs;azure-messaging-eventhubs;" + b"'5.0.1';eventhubs;FALSE.Event Hubs - Azure Storage CheckpointStore;" + b"azure-messaging-eventhubs-checkpointstore-blob;" + b"'1.0.1';eventhubs;FALSE.Identity;azure-identity;'1.1.0-beta.1';identity;" + b"FALSE.Key Vault - Certificates;azure-security-keyvault-certificates;'4.0.0';" + b"keyvault;FALSE.Key Vault - Keys;azure-security-keyvault-keys;'4.2.0-beta.1';keyvault;FALSE.Key Vault - Secrets;" + b"azure-security-keyvault-secrets;'4.1.0';keyvault;FALSE.Storage - Blobs;azure-storage-blob;'12.4.0';" + b"storage;FALSE.Storage - Blobs 
Batch;azure-storage-blob-batch;'12.4.0-beta.1';storage;" + b"FALSE.Storage - Blobs Cryptography;" + b"azure-storage-blob-cryptography;'12.4.0';storage;FALSE.Storage - File Shares;azure-storage-file-share;" + b"'12.2.0';storage;FALSE.Storage - Queues;azure-storage-queue;'12.3.0';storage;FALSE.Text Analytics;" + b"azure-ai-textanalytics;'1.0.0-beta.2';textanalytics;FALSE.Tracing;azure-core-tracing-opentelemetry;" + b"'1.0.0-beta.2';core;FALSE.Service;Package;Version;RepoPath;MissingDocs.App Configuration;" + b"azure-data-appconfiguration;'1.0.1';appconfiguration;FALSE.Event Hubs;azure-messaging-eventhubs;" + b"'5.0.1';eventhubs;FALSE." +) # ------------------------------------------------------------------------------ @@ -444,11 +450,13 @@ def test_quick_query_readall_with_fatal_error_handler(self, **kwargs): # Arrange data1 = b'{name: owner}' data2 = b'{name2: owner2}' - data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ - b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ - b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ - b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ - b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + data3 = ( + b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' + b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' + b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' + b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' + b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + ) data = data1 + b'\n' + data2 + b'\n' + data1 # upload the json file @@ -489,11 +497,13 @@ def test_quick_query_iter_records_with_fatal_error_handler(self, **kwargs): # Arrange data1 = b'{name: owner}' data2 = b'{name2: owner2}' - data3 = 
b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ - b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ - b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ - b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ - b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + data3 = ( + b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' + b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' + b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' + b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' + b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + ) data = data1 + b'\n' + data2 + b'\n' + data1 # upload the json file @@ -536,11 +546,13 @@ def test_quick_query_readall_with_fatal_error_handler_raise(self, **kwargs): # Arrange data1 = b'{name: owner}' data2 = b'{name2: owner2}' - data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ - b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ - b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ - b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ - b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + data3 = ( + b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' + b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' + b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' + b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' + b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + ) data = data1 + b'\n' + data2 + b'\n' + data1 # upload the 
json file @@ -578,11 +590,13 @@ def test_quick_query_iter_records_with_fatal_error_handler_raise(self, **kwargs) # Arrange data1 = b'{name: owner}' data2 = b'{name2: owner2}' - data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ - b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ - b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ - b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ - b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + data3 = ( + b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' + b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' + b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' + b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' + b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + ) data = data1 + b'\n' + data2 + b'\n' + data1 # upload the json file @@ -652,11 +666,13 @@ def test_quick_query_iter_records_with_fatal_error_ignore(self, **kwargs): # Arrange data1 = b'{name: owner}' data2 = b'{name2: owner2}' - data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ - b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ - b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ - b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ - b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + data3 = ( + b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' + b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' + 
b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' + b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' + b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + ) data = data1 + b'\n' + data2 + b'\n' + data1 # upload the json file From 86def6751690d78edcf303ae47a776484ab06b58 Mon Sep 17 00:00:00 2001 From: Peter Wu Date: Sun, 10 May 2026 04:46:32 -0400 Subject: [PATCH 3/4] GG --- .../tests/test_datalake_service_client.py | 16 ++-- .../test_datalake_service_client_async.py | 33 ++++--- .../tests/test_file.py | 47 ++++++++-- .../tests/test_file_system.py | 5 +- .../tests/test_file_system_async.py | 11 ++- .../tests/test_quick_query.py | 86 +++++++++---------- 6 files changed, 113 insertions(+), 85 deletions(-) diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client.py b/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client.py index f21f57090b07..653312d37131 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client.py @@ -101,9 +101,7 @@ def _assert_cors_equal(self, cors1, cors2): assert len(cors1) == len(cors2) - for i in range(0, len(cors1)): - rule1 = cors1[i] - rule2 = cors2[i] + for rule1, rule2 in zip(cors1, cors2): assert len(rule1.allowed_origins) == len(rule2.allowed_origins) assert len(rule1.allowed_methods) == len(rule2.allowed_methods) assert rule1.max_age_in_seconds == rule2.max_age_in_seconds @@ -443,7 +441,7 @@ def test_azure_named_key_credential_access(self, **kwargs): assert props is not None @DataLakePreparer() - def test_datalake_clients_properly_close(self, **kwargs): + def test_datalake_clients_properly_close(self): account_name = "adlsstorage" # secret attribute necessary for credential parameter because of hidden environment variables from loader account_key = NamedTuple("StorageAccountKey", [("secret", str)])("adlskey") @@ 
-465,13 +463,13 @@ def test_datalake_clients_properly_close(self, **kwargs): # Act with self.dsc as dsc: - pass + pass # pylint: disable=unnecessary-pass with file_system_client as fsc: - pass + pass # pylint: disable=unnecessary-pass with dir_client as dc: - pass + pass # pylint: disable=unnecessary-pass with file_client as fc: - pass + pass # pylint: disable=unnecessary-pass # Assert self.dsc._blob_service_client.__exit__.assert_called_once() @@ -522,7 +520,7 @@ def test_bad_audience_service_client(self, **kwargs): dsc = DataLakeServiceClient( self.account_url(datalake_storage_account_name, "blob"), credential=token_credential, - audience=f'https://badaudience.blob.core.windows.net/' + audience='https://badaudience.blob.core.windows.net/' ) # Will not raise ClientAuthenticationError despite bad audience due to Bearer Challenge diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client_async.py index 3abc85b49caa..4e7942402a87 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client_async.py @@ -106,9 +106,7 @@ def _assert_cors_equal(self, cors1, cors2): assert len(cors1) == len(cors2) - for i in range(0, len(cors1)): - rule1 = cors1[i] - rule2 = cors2[i] + for rule1, rule2 in zip(cors1, cors2): assert len(rule1.allowed_origins) == len(rule2.allowed_origins) assert len(rule1.allowed_methods) == len(rule2.allowed_methods) assert rule1.max_age_in_seconds == rule2.max_age_in_seconds @@ -219,7 +217,10 @@ async def test_set_delete_retention_policy_edge_cases(self, **kwargs): # Assert received_props = await self.dsc.get_service_properties() - self._assert_delete_retention_policy_not_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_not_equal( + 
received_props['delete_retention_policy'], + delete_retention_policy + ) # Should not work with 366 days delete_retention_policy = RetentionPolicy(enabled=True, days=366) @@ -229,7 +230,10 @@ async def test_set_delete_retention_policy_edge_cases(self, **kwargs): # Assert received_props = await self.dsc.get_service_properties() - self._assert_delete_retention_policy_not_equal(received_props['delete_retention_policy'], delete_retention_policy) + self._assert_delete_retention_policy_not_equal( + received_props['delete_retention_policy'], + delete_retention_policy + ) @DataLakePreparer() @recorded_by_proxy_async @@ -316,7 +320,12 @@ async def test_set_logging(self, **kwargs): datalake_storage_account_key = kwargs.pop("datalake_storage_account_key") self._setup(datalake_storage_account_name, datalake_storage_account_key) - logging = AnalyticsLogging(read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5)) + logging = AnalyticsLogging( + read=True, + write=True, + delete=True, + retention_policy=RetentionPolicy(enabled=True, days=5) + ) # Act await self.dsc.set_service_properties(analytics_logging=logging) @@ -445,7 +454,7 @@ async def test_azure_named_key_credential_access(self, **kwargs): @pytest.mark.skipif(sys.version_info < (3, 8), reason="AsyncMock not introduced until 3.8") @DataLakePreparer() - async def test_datalake_clients_properly_close(self, **kwargs): + async def test_datalake_clients_properly_close(self): account_name = "adlsstorage" # secret attribute necessary for credential parameter because of hidden environment variables from loader account_key = NamedTuple("StorageAccountKey", [("secret", str)])("adlskey") @@ -467,13 +476,13 @@ async def test_datalake_clients_properly_close(self, **kwargs): # Act async with self.dsc as dsc: - pass + pass # pylint: disable=unnecessary-pass async with file_system_client as fsc: - pass + pass # pylint: disable=unnecessary-pass async with dir_client as dc: - pass + pass # pylint: 
disable=unnecessary-pass async with file_client as fc: - pass + pass # pylint: disable=unnecessary-pass # Assert self.dsc._blob_service_client.__aexit__.assert_called_once() @@ -524,7 +533,7 @@ async def test_bad_audience_service_client(self, **kwargs): dsc = DataLakeServiceClient( self.account_url(datalake_storage_account_name, "blob"), credential=token_credential, - audience=f'https://badaudience.blob.core.windows.net/' + audience='https://badaudience.blob.core.windows.net/' ) # Will not raise ClientAuthenticationError despite bad audience due to Bearer Challenge diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file.py b/sdk/storage/azure-storage-file-datalake/tests/test_file.py index cc87bc10a1fb..8aad8143c6aa 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file.py @@ -653,7 +653,12 @@ def test_upload_data_to_existing_file_with_content_settings(self, **kwargs): content_language='spanish', content_disposition='inline') - file_client.upload_data(data, content_settings=content_settings, etag=etag, match_condition=MatchConditions.IfNotModified) + file_client.upload_data( + data, + content_settings=content_settings, + etag=etag, + match_condition=MatchConditions.IfNotModified + ) downloaded_data = file_client.download_file().readall() properties = file_client.get_file_properties() @@ -681,7 +686,14 @@ def test_upload_data_to_existing_file_with_permission_and_umask(self, **kwargs): # to override the existing file data = self.get_random_bytes(100) - file_client.upload_data(data, overwrite=True, permissions='0777', umask="0000", etag=etag, match_condition=MatchConditions.IfNotModified) + file_client.upload_data( + data, + overwrite=True, + permissions='0777', + umask="0000", + etag=etag, + match_condition=MatchConditions.IfNotModified + ) downloaded_data = file_client.download_file().readall() prop = file_client.get_access_control() @@ -765,7 +777,11 @@ def 
test_read_file_with_user_delegation_key(self, **kwargs): # Get user delegation key token_credential = self.get_credential(DataLakeServiceClient) - service_client = DataLakeServiceClient(self.account_url(datalake_storage_account_name, 'dfs'), credential=token_credential, logging_enable=True) + service_client = DataLakeServiceClient( + self.account_url(datalake_storage_account_name, 'dfs'), + credential=token_credential, + logging_enable=True + ) user_delegation_key = service_client.get_user_delegation_key(datetime.utcnow(), datetime.utcnow() + timedelta(hours=1)) @@ -804,7 +820,10 @@ def test_set_acl_with_user_delegation_key(self, **kwargs): # Get user delegation key token_credential = self.get_credential(DataLakeServiceClient) - service_client = DataLakeServiceClient(self.account_url(datalake_storage_account_name, 'dfs'), credential=token_credential) + service_client = DataLakeServiceClient( + self.account_url(datalake_storage_account_name, 'dfs'), + credential=token_credential + ) user_delegation_key = service_client.get_user_delegation_key(datetime.utcnow(), datetime.utcnow() + timedelta(hours=1)) @@ -850,7 +869,10 @@ def test_preauthorize_user_with_user_delegation_key(self, **kwargs): # Get user delegation key token_credential = self.get_credential(DataLakeServiceClient) - service_client = DataLakeServiceClient(self.account_url(datalake_storage_account_name, 'dfs'), credential=token_credential) + service_client = DataLakeServiceClient( + self.account_url(datalake_storage_account_name, 'dfs'), + credential=token_credential + ) user_delegation_key = service_client.get_user_delegation_key(datetime.utcnow(), datetime.utcnow() + timedelta(hours=1)) @@ -962,7 +984,12 @@ def test_account_sas_raises_if_sas_already_in_uri(self, **kwargs): self._setUp(datalake_storage_account_name, datalake_storage_account_key) with pytest.raises(ValueError): - DataLakeFileClient(self.dsc.url + "?sig=foo", self.file_system_name, "foo", credential=AzureSasCredential("?foo=bar")) + 
DataLakeFileClient( + self.dsc.url + "?sig=foo", + self.file_system_name, + "foo", + credential=AzureSasCredential("?foo=bar") + ) @pytest.mark.live_test_only @DataLakePreparer() @@ -1033,7 +1060,7 @@ def test_delete_file_oauth(self, **kwargs): # Arrange self._setUp(datalake_storage_account_name, datalake_storage_account_key) - + file_name = self._get_file_reference() token_credential = self.get_credential(DataLakeServiceClient) @@ -1766,7 +1793,8 @@ def test_download_file_decompress(self, **kwargs): # Arrange self._setUp(datalake_storage_account_name, datalake_storage_account_key) file_client = self._create_file_and_return_client() - compressed_data = b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcaH\xcd\xc9\xc9WH+\xca\xcfUH\xaf\xca,\x00\x00\x00\x00\xff\xff\x03\x00d\xaa\x8e\xb5\x0f\x00\x00\x00' + compressed_data = (b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcaH\xcd\xc9\xc9WH+\xca\xcfUH' + b'\xaf\xca,\x00\x00\x00\x00\xff\xff\x03\x00d\xaa\x8e\xb5\x0f\x00\x00\x00') decompressed_data = b"hello from gzip" content_settings = ContentSettings(content_encoding='gzip') @@ -1803,7 +1831,8 @@ def test_download_file_no_decompress_chunks(self, **kwargs): ) file_client.create_file() - compressed_data = b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcaH\xcd\xc9\xc9WH+\xca\xcfUH\xaf\xca,\x00\x00\x00\x00\xff\xff\x03\x00d\xaa\x8e\xb5\x0f\x00\x00\x00' + compressed_data = (b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcaH\xcd\xc9\xc9WH+\xca\xcfUH' + b'\xaf\xca,\x00\x00\x00\x00\xff\xff\x03\x00d\xaa\x8e\xb5\x0f\x00\x00\x00') content_settings = ContentSettings(content_encoding='gzip') # Act / Assert diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py index 5d246b09b0b3..31f344ca02bd 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py @@ -62,10 +62,11 @@ def _get_file_system_reference(self, 
prefix=TEST_FILE_SYSTEM_PREFIX): return file_system_name def _create_file_system(self, file_system_prefix=TEST_FILE_SYSTEM_PREFIX): + file_system_name = self._get_file_system_reference(prefix=file_system_prefix) try: - return self.dsc.create_file_system(self._get_file_system_reference(prefix=file_system_prefix)) + return self.dsc.create_file_system(file_system_name) except ResourceExistsError: - pass + return self.dsc.get_file_system_client(file_system_name) def _is_almost_equal(self, first, second, delta): if first == second: diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py index adbbd77d55ec..2ba8e97baa2e 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py @@ -58,12 +58,11 @@ def _get_file_system_reference(self, prefix=TEST_FILE_SYSTEM_PREFIX): return file_system_name async def _create_file_system(self, file_system_prefix=TEST_FILE_SYSTEM_PREFIX): + file_system_name = self._get_file_system_reference(prefix=file_system_prefix) try: - return await self.dsc.create_file_create_file_system( - self._get_file_system_reference(prefix=file_system_prefix) - ) + return await self.dsc.create_file_system(file_system_name) except ResourceExistsError: - pass + return self.dsc.get_file_system_client(file_system_name) async def _to_list(self, async_iterator): result = [] @@ -411,7 +410,7 @@ async def test_delete_file_system_with_existing_file_system_async(self, **kwargs self._setUp(datalake_storage_account_name, datalake_storage_account_key) # Arrange - file_system = await self._system() + file_system = await self._create_file_system() # Act deleted = await file_system.delete_file_system() @@ -1175,7 +1174,7 @@ async def test_list_paths_using_file_sys_delegation_sas_async(self, **kwargs): paths = [] async for path in sas_directory_client.get_paths(): 
paths.append(path) - assert paths == [] + assert not paths @DataLakePreparer() @recorded_by_proxy_async diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_quick_query.py b/sdk/storage/azure-storage-file-datalake/tests/test_quick_query.py index f78eed5bce58..a62d1792e23e 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_quick_query.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_quick_query.py @@ -7,19 +7,21 @@ import os import pytest + +from devtools_testutils import recorded_by_proxy +from devtools_testutils.storage import StorageRecordedTestCase +from settings.testcase import DataLakePreparer + from azure.storage.filedatalake import ( ArrowDialect, ArrowType, DelimitedJsonDialect, DelimitedTextDialect, - QuickQueryDialect + QuickQueryDialect, ) - -from devtools_testutils import recorded_by_proxy -from devtools_testutils.storage import StorageRecordedTestCase -from settings.testcase import DataLakePreparer -# ------------------------------------------------------------------------------ from azure.storage.filedatalake import DataLakeServiceClient +# ------------------------------------------------------------------------------ + CSV_DATA = ( b'Service,Package,Version,RepoPath,MissingDocs\r\nApp Configuration,' @@ -132,15 +134,6 @@ def _setUp(self, account_name, account_key): except: pass - def tearDown(self): - if not self.is_playback(): - try: - self.dsc.delete_file_system(self.filesystem_name) - except: - pass - - return super(TestStorageQuickQuery, self).tearDown() - # --Helpers----------------------------------------------------------------- def _get_file_reference(self): @@ -172,7 +165,7 @@ def on_error(error): assert len(errors) == 0 assert len(reader) == len(CSV_DATA) assert len(reader) == reader._blob_query_reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'\n') + assert data == CSV_DATA.replace(b'\r\n', b'\n') @DataLakePreparer() @recorded_by_proxy @@ -226,7 +219,7 @@ def 
test_quick_query_iter_records(self, **kwargs): assert len(reader) == len(CSV_DATA) assert len(reader) == reader._blob_query_reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'') + assert data == CSV_DATA.replace(b'\r\n', b'') @DataLakePreparer() @recorded_by_proxy @@ -252,7 +245,7 @@ def on_error(error): assert len(errors) == 0 assert len(reader) == len(CSV_DATA) assert len(reader) == reader._blob_query_reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'\n').decode('utf-8') + assert data == CSV_DATA.replace(b'\r\n', b'\n').decode('utf-8') @DataLakePreparer() @recorded_by_proxy @@ -268,13 +261,11 @@ def test_quick_query_iter_records_with_encoding(self, **kwargs): file_client.upload_data(CSV_DATA, overwrite=True) reader = file_client.query_file("SELECT * from BlobStorage", encoding='utf-8') - data = '' - for record in reader.records(): - data += record + data = "".join(record for record in reader.records()) assert len(reader) == len(CSV_DATA) assert len(reader) == reader._blob_query_reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'').decode('utf-8') + assert data == CSV_DATA.replace(b'\r\n', b'').decode('utf-8') @DataLakePreparer() @recorded_by_proxy @@ -291,7 +282,11 @@ def test_quick_query_iter_output_records_excluding_headers(self, **kwargs): input_format = DelimitedTextDialect(has_header=True) output_format = DelimitedTextDialect(has_header=False) - reader = file_client.query_file("SELECT * from BlobStorage", file_format=input_format, output_format=output_format) + reader = file_client.query_file( + "SELECT * from BlobStorage", + file_format=input_format, + output_format=output_format + ) read_records = reader.records() # Assert first line does not include header @@ -303,7 +298,7 @@ def test_quick_query_iter_output_records_excluding_headers(self, **kwargs): assert len(reader) == len(CSV_DATA) assert len(reader) == reader._blob_query_reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'')[44:] 
+ assert data == CSV_DATA.replace(b'\r\n', b'')[44:] @DataLakePreparer() @recorded_by_proxy @@ -331,7 +326,7 @@ def test_quick_query_iter_output_records_including_headers(self, **kwargs): assert len(reader) == len(CSV_DATA) assert len(reader) == reader._blob_query_reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'') + assert data == CSV_DATA.replace(b'\r\n', b'') @DataLakePreparer() @recorded_by_proxy @@ -355,7 +350,7 @@ def test_quick_query_iter_records_with_progress(self, **kwargs): progress += len(record) + 2 assert len(reader) == len(CSV_DATA) assert len(reader) == reader._blob_query_reader._bytes_processed - assert data, CSV_DATA.replace(b'\r\n' == b'') + assert data == CSV_DATA.replace(b'\r\n', b'') assert progress == len(reader) @DataLakePreparer() @@ -457,7 +452,7 @@ def test_quick_query_readall_with_fatal_error_handler(self, **kwargs): b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' ) - data = data1 + b'\n' + data2 + b'\n' + data1 + data = data1 + b'\n' + data2 + b'\n' + data3 # upload the json file file_name = self._get_file_reference() @@ -484,7 +479,7 @@ def on_error(error): query_result = resp.readall() assert len(errors) == 1 - assert len(resp) == 43 + assert len(resp) == 414 assert query_result == b'' @DataLakePreparer() @@ -504,7 +499,7 @@ def test_quick_query_iter_records_with_fatal_error_handler(self, **kwargs): b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' ) - data = data1 + b'\n' + data2 + b'\n' + data1 + data = data1 + b'\n' + data2 + b'\n' + data3 # upload the json file file_name = self._get_file_reference() @@ -531,9 +526,9 @@ def on_error(error): data = [] for record in resp.records(): data.append(record) - + assert len(errors) == 1 - assert len(resp) == 43 + assert len(resp) == 414 assert data == [b''] @DataLakePreparer() @@ -553,17 +548,15 @@ def 
test_quick_query_readall_with_fatal_error_handler_raise(self, **kwargs): b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' ) - data = data1 + b'\n' + data2 + b'\n' + data1 + data = data1 + b'\n' + data2 + b'\n' + data3 # upload the json file file_name = self._get_file_reference() file_client = self.dsc.get_file_client(self.filesystem_name, file_name) file_client.upload_data(data, overwrite=True) - errors = [] - def on_error(error): - raise Exception(error.description) + raise ValueError(error.description) input_format = DelimitedJsonDialect() output_format = DelimitedTextDialect( @@ -577,8 +570,8 @@ def on_error(error): on_error=on_error, file_format=input_format, output_format=output_format) - with pytest.raises(Exception): - query_result = resp.readall() + with pytest.raises(ValueError): + resp.readall() @DataLakePreparer() @recorded_by_proxy @@ -597,17 +590,15 @@ def test_quick_query_iter_records_with_fatal_error_handler_raise(self, **kwargs) b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' ) - data = data1 + b'\n' + data2 + b'\n' + data1 + data = data1 + b'\n' + data2 + b'\n' + data3 # upload the json file file_name = self._get_file_reference() file_client = self.dsc.get_file_client(self.filesystem_name, file_name) file_client.upload_data(data, overwrite=True) - errors = [] - def on_error(error): - raise Exception(error.description) + raise ValueError(error.description) input_format = DelimitedJsonDialect() output_format = DelimitedTextDialect( @@ -620,9 +611,10 @@ def on_error(error): "SELECT * from BlobStorage", on_error=on_error, file_format=input_format, - output_format=output_format) + output_format=output_format + ) - with pytest.raises(Exception): + with pytest.raises(ValueError): for record in resp.records(): print(record) @@ -673,7 +665,7 @@ def test_quick_query_iter_records_with_fatal_error_ignore(self, **kwargs): 
b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' ) - data = data1 + b'\n' + data2 + b'\n' + data1 + data = data1 + b'\n' + data2 + b'\n' + data3 # upload the json file file_name = self._get_file_reference() @@ -915,7 +907,7 @@ def on_error(error): assert len(errors) == 0 assert len(resp) == len(data) - assert listdata, [b'{"name":"owner"}',b'{}',b'{"name":"owner"}' == b''] + assert listdata, [b'{"name":"owner"}', b'{}', b'{"name":"owner"}', b''] @DataLakePreparer() @recorded_by_proxy @@ -978,10 +970,10 @@ def on_error(error): on_error=on_error, output_format=output_format) query_result = base64.b64encode(resp.readall()) - # expected_result = b'/////3gAAAAQAAAAAAAKAAwABgAFAAgACgAAAAABBAAMAAAACAAIAAAABAAIAAAABAAAAAEAAAAUAAAAEAAUAAgABgAHAAwAAAAQABAAAAAAAAEHEAAAABwAAAAEAAAAAAAAAAMAAABhYmMACAAMAAQACAAIAAAABAAAAAIAAAD/////cAAAABAAAAAAAAoADgAGAAUACAAKAAAAAAMEABAAAAAAAAoADAAAAAQACAAKAAAAMAAAAAQAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAD/////AAAAAP////+IAAAAFAAAAAAAAAAMABYABgAFAAgADAAMAAAAAAMEABgAAAAQAAAAAAAAAAAACgAYAAwABAAIAAoAAAA8AAAAEAAAAAEAAAAAAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAABAAAAAQAAAAAAAAAAAAAAAAAAAJABAAAAAAAAAAAAAAAAAAA=' + # expected_result = b'/////3gAAAAQAAAAAAAKAAwABgAFAAgACgAAAAABBAAMAAAACAAIAAAABAAIAAAABAAAAAEAAAAUAAAAEAAUAAgABgAHAAwAAAAQABAAAAAAAAEHEAAAABwAAAAEAAAAAAAAAAMAAABhYmMACAAMAAQACAAIAAAABAAAAAIAAAD/////cAAAABAAAAAAAAoADgAGAAUACAAKAAAAAAMEABAAAAAAAAoADAAAAAQACAAKAAAAMAAAAAQAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAD/////AAAAAP////+IAAAAFAAAAAAAAAAMABYABgAFAAgADAAMAAAAAAMEABgAAAAQAAAAAAAAAAAACgAYAAwABAAIAAoAAAA8AAAAEAAAAAEAAAAAAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAABAAAAAQAAAAAAAAAAAAAAAAAAAJABAAAAAAAAAAAAAAAAAAA=' # pylint: disable=line-too-long assert len(errors) == 0 - # Skip this assert for now, requires further investigation: 
https://github.com/Azure/azure-sdk-for-python/issues/24690 + # Skip this assert for now, requires investigation: https://github.com/Azure/azure-sdk-for-python/issues/24690 # assert query_result == expected_result @DataLakePreparer() From 923f7858cdd635c074dfc3893fa37bd1538a6216 Mon Sep 17 00:00:00 2001 From: Peter Wu Date: Mon, 11 May 2026 00:27:21 -0400 Subject: [PATCH 4/4] Recorded tests --- sdk/storage/azure-storage-file-datalake/assets.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/storage/azure-storage-file-datalake/assets.json b/sdk/storage/azure-storage-file-datalake/assets.json index 3d5483d66f97..df6db651c451 100644 --- a/sdk/storage/azure-storage-file-datalake/assets.json +++ b/sdk/storage/azure-storage-file-datalake/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/storage/azure-storage-file-datalake", - "Tag": "python/storage/azure-storage-file-datalake_c0870501f2" + "Tag": "python/storage/azure-storage-file-datalake_b358bed301" }