From a5402723095200b15058999c621c57793467a447 Mon Sep 17 00:00:00 2001 From: azure-sdk Date: Thu, 7 May 2026 09:38:40 +0000 Subject: [PATCH] Configurations: 'specification/storage/Storage.Management/tspconfig.yaml', API Version: 2025-08-01, SDK Release Type: stable, and CommitSHA: '7368cdcd5be76911942ac8efd7f7abe060879b42' in SpecRepo: 'https://github.com/Azure/azure-rest-api-specs' Pipeline run: https://dev.azure.com/azure-sdk/internal/_build/results?buildId=6263760 Refer to https://eng.ms/docs/products/azure-developer-experience/develop/sdk-release/sdk-release-prerequisites to prepare for SDK release. --- sdk/storage/azure-mgmt-storage/CHANGELOG.md | 340 + sdk/storage/azure-mgmt-storage/README.md | 4 +- sdk/storage/azure-mgmt-storage/_metadata.json | 15 +- .../apiview-properties.json | 1037 +- .../azure/mgmt/storage/__init__.py | 4 +- ...torage_management_client.py => _client.py} | 119 +- .../azure/mgmt/storage/_configuration.py | 17 +- .../azure/mgmt/storage/_patch.py | 4 +- .../azure/mgmt/storage/_utils/__init__.py | 2 +- .../azure/mgmt/storage/_utils/model_base.py | 1459 ++ .../mgmt/storage/_utils/serialization.py | 2 +- .../azure/mgmt/storage/_utils/utils.py | 40 + .../azure/mgmt/storage/_validation.py | 66 + .../azure/mgmt/storage/_version.py | 7 +- .../azure/mgmt/storage/aio/__init__.py | 4 +- ...torage_management_client.py => _client.py} | 121 +- .../azure/mgmt/storage/aio/_configuration.py | 17 +- .../azure/mgmt/storage/aio/_patch.py | 4 +- .../mgmt/storage/aio/operations/__init__.py | 75 +- .../operations/_blob_containers_operations.py | 1828 -- .../_blob_inventory_policies_operations.py | 430 - .../operations/_blob_services_operations.py | 354 - .../_deleted_accounts_operations.py | 192 - .../_encryption_scopes_operations.py | 563 - .../operations/_file_services_operations.py | 481 - .../aio/operations/_file_shares_operations.py | 987 - .../aio/operations/_local_users_operations.py | 587 - .../_management_policies_operations.py | 337 - 
...ity_perimeter_configurations_operations.py | 355 - ..._object_replication_policies_operations.py | 449 - .../storage/aio/operations/_operations.py | 16573 ++++++++++++- .../mgmt/storage/aio/operations/_patch.py | 4 +- ...private_endpoint_connections_operations.py | 432 - .../_private_link_resources_operations.py | 112 - .../aio/operations/_queue_operations.py | 595 - .../operations/_queue_services_operations.py | 321 - .../aio/operations/_skus_operations.py | 129 - .../_storage_accounts_operations.py | 2207 -- ..._assignment_instances_report_operations.py | 168 - ...assignments_instances_report_operations.py | 162 - .../_storage_task_assignments_operations.py | 829 - .../aio/operations/_table_operations.py | 578 - .../operations/_table_services_operations.py | 321 - .../aio/operations/_usages_operations.py | 133 - .../azure/mgmt/storage/models/__init__.py | 156 +- ...e_management_client_enums.py => _enums.py} | 366 +- .../azure/mgmt/storage/models/_models.py | 11828 +++++++++ .../azure/mgmt/storage/models/_models_py3.py | 10726 --------- .../azure/mgmt/storage/models/_patch.py | 4 +- .../azure/mgmt/storage/operations/__init__.py | 75 +- .../operations/_blob_containers_operations.py | 2390 -- .../_blob_inventory_policies_operations.py | 583 - .../operations/_blob_services_operations.py | 461 - .../_deleted_accounts_operations.py | 249 - .../_encryption_scopes_operations.py | 728 - .../operations/_file_services_operations.py | 668 - .../operations/_file_shares_operations.py | 1295 - .../operations/_local_users_operations.py | 815 - .../_management_policies_operations.py | 453 - ...ity_perimeter_configurations_operations.py | 476 - ..._object_replication_policies_operations.py | 595 - .../mgmt/storage/operations/_operations.py | 20026 +++++++++++++++- .../azure/mgmt/storage/operations/_patch.py | 4 +- ...private_endpoint_connections_operations.py | 590 - .../_private_link_resources_operations.py | 150 - .../storage/operations/_queue_operations.py | 814 - 
.../operations/_queue_services_operations.py | 430 - .../storage/operations/_skus_operations.py | 155 - .../_storage_accounts_operations.py | 2827 --- ..._assignment_instances_report_operations.py | 223 - ...assignments_instances_report_operations.py | 208 - .../_storage_task_assignments_operations.py | 1036 - .../storage/operations/_table_operations.py | 767 - .../operations/_table_services_operations.py | 430 - .../storage/operations/_usages_operations.py | 162 - .../blob_containers_clear_legal_hold.py | 9 +- .../blob_containers_delete.py | 7 +- ...b_containers_delete_immutability_policy.py | 10 +- ...b_containers_extend_immutability_policy.py | 10 +- .../generated_samples/blob_containers_get.py | 7 +- ...blob_containers_get_immutability_policy.py | 7 +- ..._with_allow_protected_append_writes_all.py | 7 +- .../blob_containers_lease_acquire.py | 7 +- .../blob_containers_lease_break.py | 7 +- .../generated_samples/blob_containers_list.py | 7 +- ...lob_containers_lock_immutability_policy.py | 10 +- .../blob_containers_patch.py | 7 +- .../generated_samples/blob_containers_put.py | 121 +- ...containers_put_default_encryption_scope.py | 7 +- ...blob_containers_put_immutability_policy.py | 7 +- ...olicy_allow_protected_append_writes_all.py | 7 +- .../blob_containers_put_object_level_worm.py | 7 +- .../blob_containers_set_legal_hold.py | 9 +- ..._hold_allow_protected_append_writes_all.py | 9 +- .../generated_samples/blob_ranges_restore.py | 7 +- .../generated_samples/blob_services_get.py | 7 +- .../generated_samples/blob_services_list.py | 7 +- .../generated_samples/blob_services_put.py | 12 +- ...lob_services_put_allow_permanent_delete.py | 7 +- ...ces_put_last_access_time_based_tracking.py | 7 +- .../delete_storage_task_assignment.py | 7 +- .../generated_samples/deleted_account_get.py | 7 +- .../generated_samples/deleted_account_list.py | 7 +- .../deleted_blob_containers_list.py | 7 +- .../deleted_file_shares_list.py | 7 +- .../generated_samples/file_services_get.py | 
7 +- .../file_services_get_usage.py | 7 +- .../generated_samples/file_services_list.py | 7 +- .../file_services_list_usages.py | 7 +- .../generated_samples/file_services_put.py | 7 +- ...services_put_enable_secure_smb_features.py | 7 +- ...le_services_put_enable_smb_multichannel.py | 7 +- ...ices_put_encryption_in_transit_required.py | 7 +- .../file_share_acls_patch.py | 7 +- .../file_share_snapshots_list.py | 7 +- .../generated_samples/file_shares_delete.py | 7 +- .../generated_samples/file_shares_get.py | 7 +- .../file_shares_get_paid_bursting.py | 7 +- .../file_shares_get_provisioned_v2.py | 7 +- .../file_shares_get_stats.py | 7 +- .../file_shares_lease_acquire.py | 7 +- .../file_shares_lease_break.py | 7 +- .../generated_samples/file_shares_list.py | 7 +- .../file_shares_list_paid_bursting.py | 7 +- .../file_shares_list_provisioned_v2.py | 7 +- .../generated_samples/file_shares_patch.py | 7 +- .../file_shares_patch_paid_bursting.py | 7 +- .../file_shares_patch_provisioned_v2.py | 7 +- .../generated_samples/file_shares_put.py | 95 +- .../file_shares_put_access_tier.py | 7 +- .../generated_samples/file_shares_put_nfs.py | 7 +- .../file_shares_put_paid_bursting.py | 7 +- .../file_shares_put_provisioned_v2.py | 7 +- .../generated_samples/file_shares_restore.py | 7 +- .../get_storage_task_assignment.py | 7 +- ...ask_assignment_instances_report_summary.py | 7 +- ...st_storage_task_assignments_for_account.py | 7 +- ...sk_assignments_instances_report_summary.py | 7 +- .../generated_samples/local_user_create.py | 7 +- .../local_user_create_nf_sv3_enabled.py | 7 +- .../generated_samples/local_user_delete.py | 7 +- .../generated_samples/local_user_get.py | 7 +- .../generated_samples/local_user_list_keys.py | 7 +- .../local_user_regenerate_password.py | 7 +- .../generated_samples/local_user_update.py | 7 +- .../generated_samples/local_users_list.py | 7 +- .../local_users_list_nf_sv3_enabled.py | 7 +- ...rk_security_perimeter_configuration_get.py | 7 +- 
...k_security_perimeter_configuration_list.py | 7 +- ...urity_perimeter_configuration_reconcile.py | 7 +- .../nfs_v3_account_create.py | 6 +- .../object_level_worm_container_migration.py | 7 +- .../generated_samples/operations_list.py | 5 +- .../patch_storage_task_assignment.py | 7 +- .../put_storage_task_assignment.py | 6 +- .../put_storage_task_assignment_mock_run.py | 56 + ...age_task_assignment_required_properties.py | 6 +- .../queue_operation_delete.py | 7 +- .../generated_samples/queue_operation_get.py | 7 +- .../generated_samples/queue_operation_list.py | 7 +- .../queue_operation_patch.py | 22 +- .../generated_samples/queue_operation_put.py | 22 +- .../queue_operation_put_with_metadata.py | 7 +- .../generated_samples/queue_services_get.py | 7 +- .../generated_samples/queue_services_list.py | 7 +- .../generated_samples/queue_services_put.py | 7 +- .../generated_samples/sku_list.py | 6 +- .../sku_list_with_location_info.py | 7 +- .../stop_storage_task_assignment.py | 42 + ..._abort_hierarchical_namespace_migration.py | 7 +- ...storage_account_check_name_availability.py | 7 +- .../storage_account_create.py | 13 +- ...ccount_create_allowed_copy_scope_to_aad.py | 7 +- ...eate_allowed_copy_scope_to_private_link.py | 7 +- ...t_create_disallow_public_network_access.py | 7 +- ...ate_dns_endpoint_type_to_azure_dns_zone.py | 7 +- ...nt_create_dns_endpoint_type_to_standard.py | 7 +- ...unt_create_enable_public_network_access.py | 7 +- ...bject_replication_policy_on_destination.py | 8 +- ...ate_object_replication_policy_on_source.py | 8 +- .../storage_account_create_placement.py | 7 +- ...count_create_premium_block_blob_storage.py | 7 +- ...r_assigned_encryption_identity_with_cmk.py | 6 +- ...ntity_with_federated_identity_client_id.py | 6 +- ...t_create_with_data_collaboration_policy.py | 54 + ...account_create_with_immutability_policy.py | 7 +- ...e_account_create_with_smart_access_tier.py | 74 + .../storage_account_create_zones.py | 7 +- .../storage_account_delete.py | 
7 +- ...ge_account_delete_blob_inventory_policy.py | 7 +- ...torage_account_delete_management_policy.py | 7 +- ...ccount_delete_object_replication_policy.py | 7 +- ...ount_delete_private_endpoint_connection.py | 7 +- .../storage_account_enable_ad.py | 7 +- .../storage_account_enable_cmk.py | 7 +- .../storage_account_enable_smb_oauth.py | 7 +- .../storage_account_encryption_scope_list.py | 7 +- .../storage_account_failover.py | 7 +- .../storage_account_failover_planned.py | 7 +- ...account_get_async_sku_conversion_status.py | 7 +- ...orage_account_get_blob_inventory_policy.py | 7 +- .../storage_account_get_encryption_scope.py | 7 +- .../storage_account_get_management_policy.py | 7 +- .../storage_account_get_migration_failed.py | 7 +- ...orage_account_get_migration_in_progress.py | 7 +- ...e_account_get_object_replication_policy.py | 7 +- ...account_get_private_endpoint_connection.py | 7 +- .../storage_account_get_properties.py | 7 +- ...rage_account_get_properties_cmk_enabled.py | 7 +- ..._properties_cmk_version_expiration_time.py | 7 +- ...geo_replication_statscan_failover_false.py | 7 +- ..._geo_replication_statscan_failover_true.py | 7 +- ...ccount_hierarchical_namespace_migration.py | 7 +- .../storage_account_leverage_ipv6_ability.py | 7 +- .../generated_samples/storage_account_list.py | 7 +- .../storage_account_list_account_sas.py | 7 +- ...rage_account_list_blob_inventory_policy.py | 7 +- .../storage_account_list_by_resource_group.py | 7 +- .../storage_account_list_keys.py | 7 +- .../storage_account_list_location_usage.py | 7 +- ...ccount_list_object_replication_policies.py | 7 +- ...count_list_private_endpoint_connections.py | 7 +- ...age_account_list_private_link_resources.py | 7 +- .../storage_account_list_service_sas.py | 7 +- .../storage_account_patch_encryption_scope.py | 7 +- .../storage_account_post_migration.py | 7 +- .../storage_account_put_encryption_scope.py | 37 +- ...on_scope_with_infrastructure_encryption.py | 7 +- 
...account_put_private_endpoint_connection.py | 7 +- .../storage_account_regenerate_kerb_key.py | 7 +- .../storage_account_regenerate_key.py | 7 +- ...age_account_revoke_user_delegation_keys.py | 7 +- ...orage_account_set_blob_inventory_policy.py | 7 +- ...e_delete_and_new_schema_for_hns_account.py | 7 +- ...lete_and_new_schema_for_non_hns_account.py | 7 +- .../storage_account_set_management_policy.py | 7 +- ...y_base_blob_days_after_creation_actions.py | 7 +- ...set_management_policy_cold_tier_actions.py | 7 +- ...ement_policy_for_block_and_append_blobs.py | 7 +- ..._set_management_policy_hot_tier_actions.py | 7 +- ...icy_last_access_time_based_blob_actions.py | 7 +- ...nt_policy_last_tier_change_time_actions.py | 7 +- ...gement_policy_with_snapshot_and_version.py | 7 +- .../storage_account_update.py | 12 +- ...age_account_update_access_tier_to_smart.py | 79 + ...ccount_update_allowed_copy_scope_to_aad.py | 6 +- ...nt_update_disable_public_network_access.py | 6 +- ...bject_replication_policy_on_destination.py | 8 +- ...ate_object_replication_policy_on_source.py | 8 +- .../storage_account_update_placement.py | 6 +- ...r_assigned_encryption_identity_with_cmk.py | 6 +- ...ntity_with_federated_identity_client_id.py | 6 +- ...t_update_with_data_collaboration_policy.py | 51 + ...account_update_with_immutability_policy.py | 7 +- .../storage_account_update_zones.py | 6 +- .../storage_connectors_create.py | 63 + .../storage_connectors_delete.py | 42 + .../storage_connectors_get.py | 43 + ...rage_connectors_list_by_storage_account.py | 43 + ...age_connectors_test_existing_connection.py | 44 + .../storage_connectors_update.py | 55 + .../storage_data_shares_create.py | 57 + .../storage_data_shares_delete.py | 42 + .../storage_data_shares_get.py | 43 + ...age_data_shares_list_by_storage_account.py | 43 + .../storage_data_shares_update.py | 56 + .../table_operation_delete.py | 7 +- .../generated_samples/table_operation_get.py | 7 +- .../generated_samples/table_operation_list.py 
| 7 +- .../table_operation_patch.py | 7 +- .../generated_samples/table_operation_put.py | 7 +- .../table_operation_put_or_patch_acls.py | 9 +- ...peration_put_or_patch_acls_table_create.py | 43 + .../generated_samples/table_services_get.py | 7 +- .../generated_samples/table_services_list.py | 7 +- .../generated_samples/table_services_put.py | 7 +- .../generated_tests/conftest.py | 2 +- ...e_management_blob_containers_operations.py | 316 +- ...gement_blob_containers_operations_async.py | 330 +- ...ment_blob_inventory_policies_operations.py | 60 +- ...lob_inventory_policies_operations_async.py | 60 +- ...age_management_blob_services_operations.py | 91 +- ...nagement_blob_services_operations_async.py | 91 +- ...torage_management_connectors_operations.py | 141 + ..._management_connectors_operations_async.py | 150 + ...orage_management_data_shares_operations.py | 124 + ...management_data_shares_operations_async.py | 131 + ..._management_deleted_accounts_operations.py | 21 +- ...ement_deleted_accounts_operations_async.py | 21 +- ...management_encryption_scopes_operations.py | 88 +- ...ment_encryption_scopes_operations_async.py | 88 +- ...age_management_file_services_operations.py | 84 +- ...nagement_file_services_operations_async.py | 84 +- ...orage_management_file_shares_operations.py | 213 +- ...management_file_shares_operations_async.py | 213 +- ...orage_management_local_users_operations.py | 56 +- ...management_local_users_operations_async.py | 56 +- ...nagement_management_policies_operations.py | 193 +- ...nt_management_policies_operations_async.py | 193 +- ...ity_perimeter_configurations_operations.py | 19 +- ...rimeter_configurations_operations_async.py | 19 +- ..._object_replication_policies_operations.py | 67 +- ...t_replication_policies_operations_async.py | 67 +- .../test_storage_management_operations.py | 6 +- ...est_storage_management_operations_async.py | 6 +- ...private_endpoint_connections_operations.py | 48 +- 
...e_endpoint_connections_operations_async.py | 48 +- ...ement_private_link_resources_operations.py | 3 +- ...private_link_resources_operations_async.py | 3 +- ...est_storage_management_queue_operations.py | 63 +- ...orage_management_queue_operations_async.py | 63 +- ...ge_management_queue_services_operations.py | 47 +- ...agement_queue_services_operations_async.py | 47 +- ...test_storage_management_skus_operations.py | 6 +- ...torage_management_skus_operations_async.py | 6 +- ..._management_storage_accounts_operations.py | 433 +- ...ement_storage_accounts_operations_async.py | 351 +- ..._assignment_instances_report_operations.py | 3 +- ...nment_instances_report_operations_async.py | 3 +- ...assignments_instances_report_operations.py | 3 +- ...ments_instances_report_operations_async.py | 3 +- ...ent_storage_task_assignments_operations.py | 55 +- ...orage_task_assignments_operations_async.py | 57 +- ...est_storage_management_table_operations.py | 19 +- ...orage_management_table_operations_async.py | 19 +- ...ge_management_table_services_operations.py | 47 +- ...agement_table_services_operations_async.py | 47 +- ...st_storage_management_usages_operations.py | 3 +- ...rage_management_usages_operations_async.py | 3 +- .../tests/disable_test_cli_mgmt_storage.py | 1 + .../azure-mgmt-storage/tsp-location.yaml | 4 + 331 files changed, 55796 insertions(+), 43229 deletions(-) rename sdk/storage/azure-mgmt-storage/azure/mgmt/storage/{_storage_management_client.py => _client.py} (88%) create mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/model_base.py create mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/utils.py create mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_validation.py rename sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/{_storage_management_client.py => _client.py} (89%) delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_blob_containers_operations.py delete mode 
100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_blob_inventory_policies_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_blob_services_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_deleted_accounts_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_encryption_scopes_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_file_services_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_file_shares_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_local_users_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_management_policies_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_network_security_perimeter_configurations_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_object_replication_policies_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_private_endpoint_connections_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_private_link_resources_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_queue_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_queue_services_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_skus_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_accounts_operations.py delete mode 100644 
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_task_assignment_instances_report_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_task_assignments_instances_report_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_task_assignments_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_table_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_table_services_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_usages_operations.py rename sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/{_storage_management_client_enums.py => _enums.py} (71%) create mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_models.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_models_py3.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_blob_containers_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_blob_inventory_policies_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_blob_services_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_deleted_accounts_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_encryption_scopes_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_file_services_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_file_shares_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_local_users_operations.py delete mode 100644 
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_management_policies_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_network_security_perimeter_configurations_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_object_replication_policies_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_private_endpoint_connections_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_private_link_resources_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_queue_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_queue_services_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_skus_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_accounts_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_task_assignment_instances_report_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_task_assignments_instances_report_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_task_assignments_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_table_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_table_services_operations.py delete mode 100644 sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_usages_operations.py rename sdk/storage/azure-mgmt-storage/generated_samples/{storage_task_assignments_crud => }/delete_storage_task_assignment.py (79%) rename sdk/storage/azure-mgmt-storage/generated_samples/{storage_task_assignments_crud => }/get_storage_task_assignment.py (80%) rename 
sdk/storage/azure-mgmt-storage/generated_samples/{storage_task_assignments_list => }/list_storage_task_assignment_instances_report_summary.py (79%) rename sdk/storage/azure-mgmt-storage/generated_samples/{storage_task_assignments_list => }/list_storage_task_assignments_for_account.py (79%) rename sdk/storage/azure-mgmt-storage/generated_samples/{storage_task_assignments_list => }/list_storage_task_assignments_instances_report_summary.py (79%) rename sdk/storage/azure-mgmt-storage/generated_samples/{storage_task_assignments_crud => }/patch_storage_task_assignment.py (84%) rename sdk/storage/azure-mgmt-storage/generated_samples/{storage_task_assignments_crud => }/put_storage_task_assignment.py (87%) create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/put_storage_task_assignment_mock_run.py rename sdk/storage/azure-mgmt-storage/generated_samples/{storage_task_assignments_crud => }/put_storage_task_assignment_required_properties.py (86%) create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/stop_storage_task_assignment.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_with_data_collaboration_policy.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_with_smart_access_tier.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_access_tier_to_smart.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_with_data_collaboration_policy.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_create.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_delete.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_get.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_list_by_storage_account.py create mode 100644 
sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_test_existing_connection.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_update.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_create.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_delete.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_get.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_list_by_storage_account.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_update.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_samples/table_operation_put_or_patch_acls_table_create.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_connectors_operations.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_connectors_operations_async.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_data_shares_operations.py create mode 100644 sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_data_shares_operations_async.py create mode 100644 sdk/storage/azure-mgmt-storage/tsp-location.yaml diff --git a/sdk/storage/azure-mgmt-storage/CHANGELOG.md b/sdk/storage/azure-mgmt-storage/CHANGELOG.md index 7cd2a2e371f8..bfef89365cf2 100644 --- a/sdk/storage/azure-mgmt-storage/CHANGELOG.md +++ b/sdk/storage/azure-mgmt-storage/CHANGELOG.md @@ -1,5 +1,345 @@ # Release History +## 25.0.0 (2026-05-07) + +### Features Added + + - Client `StorageManagementClient` added method `send_request` + - Client `StorageManagementClient` added operation group `connectors` + - Client `StorageManagementClient` added operation group `data_shares` + - Enum `AccessTier` added member `SMART` + - Enum `AllowedCopyScope` added member `ALL` + - Model 
`AzureEntityResource` added property `system_data` + - Model `BlobContainer` added property `container_properties` + - Model `BlobContainer` added property `system_data` + - Model `BlobServiceProperties` added property `blob_service_properties` + - Model `BlobServiceProperties` added property `system_data` + - Model `DeletedAccount` added property `system_data` + - Model `EncryptionScope` added property `encryption_scope_properties` + - Model `EncryptionScope` added property `system_data` + - Model `FileServiceProperties` added property `file_service_properties` + - Model `FileServiceProperties` added property `system_data` + - Model `FileServiceUsage` added property `system_data` + - Model `FileShare` added property `file_share_properties` + - Model `FileShare` added property `system_data` + - Model `FileShareItem` added property `properties` + - Model `FileShareItem` added property `system_data` + - Model `ImmutabilityPolicy` added property `properties` + - Model `ImmutabilityPolicy` added property `system_data` + - Model `ImmutabilityPolicyProperties` added property `properties` + - Model `ListContainerItem` added property `properties` + - Model `ListContainerItem` added property `system_data` + - Model `ListQueue` added property `queue_properties` + - Model `ListQueue` added property `system_data` + - Model `ManagementPolicy` added property `system_data` + - Model `ObjectReplicationPolicy` added property `system_data` + - Model `Operation` added property `operation_properties` + - Model `PrivateEndpointConnection` added property `system_data` + - Model `PrivateLinkResource` added property `system_data` + - Model `ProxyResource` added property `system_data` + - Model `QueueServiceProperties` added property `queue_service_properties` + - Model `QueueServiceProperties` added property `system_data` + - Model `Restriction` added property `values_property` + - Model `StorageAccount` added property `system_data` + - Model 
`StorageAccountCheckNameAvailabilityParameters` added property `type` + - Model `StorageAccountCreateParameters` added property `properties` + - Model `StorageAccountMigration` added property `storage_account_migration_details` + - Model `StorageAccountMigration` added property `system_data` + - Model `StorageAccountUpdateParameters` added property `properties` + - Model `StorageQueue` added property `queue_properties` + - Model `StorageQueue` added property `system_data` + - Model `StorageTaskAssignment` added property `system_data` + - Model `StorageTaskReportInstance` added property `system_data` + - Model `Table` added property `table_properties` + - Model `Table` added property `system_data` + - Model `TableServiceProperties` added property `table_service_properties` + - Model `TableServiceProperties` added property `system_data` + - Model `TrackedResource` added property `system_data` + - Enum `TriggerType` added member `MOCK_RUN` + - Model `UpdateHistoryProperty` added property `update_property` + - Added model `BlobServicePropertiesProperties` + - Added model `CloudError` + - Added model `Connector` + - Added model `ContainerProperties` + - Added model `DataShare` + - Added model `DataShareConnection` + - Added model `DataShareSource` + - Added model `EncryptionScopeProperties` + - Added model `FileServicePropertiesProperties` + - Added model `FileShareProperties` + - Added model `ImmutabilityPolicyProperty` + - Added enum `LeaseContainerRequestAction` + - Added model `ListQueueProperties` + - Added model `ManagedIdentityAuthProperties` + - Added enum `NativeDataSharingProvisioningState` + - Added model `ObjectReplicationPolicyPropertiesTagsReplication` + - Added model `OperationProperties` + - Added model `QueueProperties` + - Added model `QueueServicePropertiesProperties` + - Added model `ServiceSharedKeyAccessProperties` + - Added model `StaticWebsite` + - Added model `StorageAccountMigrationProperties` + - Added model 
`StorageAccountPropertiesCreateParameters` + - Added model `StorageAccountPropertiesUpdateParameters` + - Added model `StorageAccountSharedKeyAccessProperties` + - Added model `StorageConnectorAuthProperties` + - Added enum `StorageConnectorAuthType` + - Added model `StorageConnectorConnection` + - Added enum `StorageConnectorConnectionType` + - Added enum `StorageConnectorDataSourceType` + - Added model `StorageConnectorProperties` + - Added model `StorageConnectorSource` + - Added enum `StorageConnectorSourceType` + - Added enum `StorageConnectorState` + - Added model `StorageDataCollaborationPolicyProperties` + - Added model `StorageDataShareAccessPolicy` + - Added enum `StorageDataShareAccessPolicyPermission` + - Added model `StorageDataShareAsset` + - Added model `StorageDataShareProperties` + - Added enum `StorageTaskAssignmentProvisioningState` + - Added model `TableProperties` + - Added model `TableServicePropertiesProperties` + - Added model `TestConnectionResponse` + - Added model `TestExistingConnectionRequest` + - Model `BlobContainersOperations` added parameter `etag` in method `create_or_update_immutability_policy` + - Model `BlobContainersOperations` added parameter `match_condition` in method `create_or_update_immutability_policy` + - Model `BlobContainersOperations` added parameter `etag` in method `delete_immutability_policy` + - Model `BlobContainersOperations` added parameter `match_condition` in method `delete_immutability_policy` + - Model `BlobContainersOperations` added parameter `etag` in method `extend_immutability_policy` + - Model `BlobContainersOperations` added parameter `match_condition` in method `extend_immutability_policy` + - Model `BlobContainersOperations` added parameter `etag` in method `get_immutability_policy` + - Model `BlobContainersOperations` added parameter `match_condition` in method `get_immutability_policy` + - Model `BlobContainersOperations` added parameter `etag` in method `lock_immutability_policy` + - Model 
`BlobContainersOperations` added parameter `match_condition` in method `lock_immutability_policy` + - Model `StorageTaskAssignmentsOperations` added method `begin_stop_assignment` + - Added model `ConnectorsOperations` + - Added model `DataSharesOperations` + +### Breaking Changes + + - Model `BlobContainer` deleted or renamed its instance variable `version` + - Model `BlobContainer` deleted or renamed its instance variable `deleted` + - Model `BlobContainer` deleted or renamed its instance variable `deleted_time` + - Model `BlobContainer` deleted or renamed its instance variable `remaining_retention_days` + - Model `BlobContainer` deleted or renamed its instance variable `default_encryption_scope` + - Model `BlobContainer` deleted or renamed its instance variable `deny_encryption_scope_override` + - Model `BlobContainer` deleted or renamed its instance variable `public_access` + - Model `BlobContainer` deleted or renamed its instance variable `last_modified_time` + - Model `BlobContainer` deleted or renamed its instance variable `lease_status` + - Model `BlobContainer` deleted or renamed its instance variable `lease_state` + - Model `BlobContainer` deleted or renamed its instance variable `lease_duration` + - Model `BlobContainer` deleted or renamed its instance variable `metadata` + - Model `BlobContainer` deleted or renamed its instance variable `immutability_policy` + - Model `BlobContainer` deleted or renamed its instance variable `legal_hold` + - Model `BlobContainer` deleted or renamed its instance variable `has_legal_hold` + - Model `BlobContainer` deleted or renamed its instance variable `has_immutability_policy` + - Model `BlobContainer` deleted or renamed its instance variable `immutable_storage_with_versioning` + - Model `BlobContainer` deleted or renamed its instance variable `enable_nfs_v3_root_squash` + - Model `BlobContainer` deleted or renamed its instance variable `enable_nfs_v3_all_squash` + - Model `BlobServiceProperties` deleted or renamed its 
instance variable `cors` + - Model `BlobServiceProperties` deleted or renamed its instance variable `default_service_version` + - Model `BlobServiceProperties` deleted or renamed its instance variable `delete_retention_policy` + - Model `BlobServiceProperties` deleted or renamed its instance variable `is_versioning_enabled` + - Model `BlobServiceProperties` deleted or renamed its instance variable `automatic_snapshot_policy_enabled` + - Model `BlobServiceProperties` deleted or renamed its instance variable `change_feed` + - Model `BlobServiceProperties` deleted or renamed its instance variable `restore_policy` + - Model `BlobServiceProperties` deleted or renamed its instance variable `container_delete_retention_policy` + - Model `BlobServiceProperties` deleted or renamed its instance variable `last_access_time_tracking_policy` + - Model `EncryptionScope` deleted or renamed its instance variable `source` + - Model `EncryptionScope` deleted or renamed its instance variable `state` + - Model `EncryptionScope` deleted or renamed its instance variable `creation_time` + - Model `EncryptionScope` deleted or renamed its instance variable `last_modified_time` + - Model `EncryptionScope` deleted or renamed its instance variable `key_vault_properties` + - Model `EncryptionScope` deleted or renamed its instance variable `require_infrastructure_encryption` + - Model `FileServiceProperties` deleted or renamed its instance variable `cors` + - Model `FileServiceProperties` deleted or renamed its instance variable `share_delete_retention_policy` + - Model `FileServiceProperties` deleted or renamed its instance variable `protocol_settings` + - Model `FileShare` deleted or renamed its instance variable `last_modified_time` + - Model `FileShare` deleted or renamed its instance variable `metadata` + - Model `FileShare` deleted or renamed its instance variable `share_quota` + - Model `FileShare` deleted or renamed its instance variable `provisioned_iops` + - Model `FileShare` deleted or 
renamed its instance variable `provisioned_bandwidth_mibps` + - Model `FileShare` deleted or renamed its instance variable `included_burst_iops` + - Model `FileShare` deleted or renamed its instance variable `max_burst_credits_for_iops` + - Model `FileShare` deleted or renamed its instance variable `next_allowed_quota_downgrade_time` + - Model `FileShare` deleted or renamed its instance variable `next_allowed_provisioned_iops_downgrade_time` + - Model `FileShare` deleted or renamed its instance variable `next_allowed_provisioned_bandwidth_downgrade_time` + - Model `FileShare` deleted or renamed its instance variable `enabled_protocols` + - Model `FileShare` deleted or renamed its instance variable `root_squash` + - Model `FileShare` deleted or renamed its instance variable `version` + - Model `FileShare` deleted or renamed its instance variable `deleted` + - Model `FileShare` deleted or renamed its instance variable `deleted_time` + - Model `FileShare` deleted or renamed its instance variable `remaining_retention_days` + - Model `FileShare` deleted or renamed its instance variable `access_tier` + - Model `FileShare` deleted or renamed its instance variable `access_tier_change_time` + - Model `FileShare` deleted or renamed its instance variable `access_tier_status` + - Model `FileShare` deleted or renamed its instance variable `share_usage_bytes` + - Model `FileShare` deleted or renamed its instance variable `lease_status` + - Model `FileShare` deleted or renamed its instance variable `lease_state` + - Model `FileShare` deleted or renamed its instance variable `lease_duration` + - Model `FileShare` deleted or renamed its instance variable `signed_identifiers` + - Model `FileShare` deleted or renamed its instance variable `snapshot_time` + - Model `FileShare` deleted or renamed its instance variable `file_share_paid_bursting` + - Model `FileShareItem` deleted or renamed its instance variable `last_modified_time` + - Model `FileShareItem` deleted or renamed its 
instance variable `metadata` + - Model `FileShareItem` deleted or renamed its instance variable `share_quota` + - Model `FileShareItem` deleted or renamed its instance variable `provisioned_iops` + - Model `FileShareItem` deleted or renamed its instance variable `provisioned_bandwidth_mibps` + - Model `FileShareItem` deleted or renamed its instance variable `included_burst_iops` + - Model `FileShareItem` deleted or renamed its instance variable `max_burst_credits_for_iops` + - Model `FileShareItem` deleted or renamed its instance variable `next_allowed_quota_downgrade_time` + - Model `FileShareItem` deleted or renamed its instance variable `next_allowed_provisioned_iops_downgrade_time` + - Model `FileShareItem` deleted or renamed its instance variable `next_allowed_provisioned_bandwidth_downgrade_time` + - Model `FileShareItem` deleted or renamed its instance variable `enabled_protocols` + - Model `FileShareItem` deleted or renamed its instance variable `root_squash` + - Model `FileShareItem` deleted or renamed its instance variable `version` + - Model `FileShareItem` deleted or renamed its instance variable `deleted` + - Model `FileShareItem` deleted or renamed its instance variable `deleted_time` + - Model `FileShareItem` deleted or renamed its instance variable `remaining_retention_days` + - Model `FileShareItem` deleted or renamed its instance variable `access_tier` + - Model `FileShareItem` deleted or renamed its instance variable `access_tier_change_time` + - Model `FileShareItem` deleted or renamed its instance variable `access_tier_status` + - Model `FileShareItem` deleted or renamed its instance variable `share_usage_bytes` + - Model `FileShareItem` deleted or renamed its instance variable `lease_status` + - Model `FileShareItem` deleted or renamed its instance variable `lease_state` + - Model `FileShareItem` deleted or renamed its instance variable `lease_duration` + - Model `FileShareItem` deleted or renamed its instance variable `signed_identifiers` + - 
Model `FileShareItem` deleted or renamed its instance variable `snapshot_time` + - Model `FileShareItem` deleted or renamed its instance variable `file_share_paid_bursting` + - Model `ImmutabilityPolicy` deleted or renamed its instance variable `immutability_period_since_creation_in_days` + - Model `ImmutabilityPolicy` deleted or renamed its instance variable `state` + - Model `ImmutabilityPolicy` deleted or renamed its instance variable `allow_protected_append_writes` + - Model `ImmutabilityPolicy` deleted or renamed its instance variable `allow_protected_append_writes_all` + - Model `ImmutabilityPolicyProperties` deleted or renamed its instance variable `immutability_period_since_creation_in_days` + - Model `ImmutabilityPolicyProperties` deleted or renamed its instance variable `state` + - Model `ImmutabilityPolicyProperties` deleted or renamed its instance variable `allow_protected_append_writes` + - Model `ImmutabilityPolicyProperties` deleted or renamed its instance variable `allow_protected_append_writes_all` + - Model `ListContainerItem` deleted or renamed its instance variable `version` + - Model `ListContainerItem` deleted or renamed its instance variable `deleted` + - Model `ListContainerItem` deleted or renamed its instance variable `deleted_time` + - Model `ListContainerItem` deleted or renamed its instance variable `remaining_retention_days` + - Model `ListContainerItem` deleted or renamed its instance variable `default_encryption_scope` + - Model `ListContainerItem` deleted or renamed its instance variable `deny_encryption_scope_override` + - Model `ListContainerItem` deleted or renamed its instance variable `public_access` + - Model `ListContainerItem` deleted or renamed its instance variable `last_modified_time` + - Model `ListContainerItem` deleted or renamed its instance variable `lease_status` + - Model `ListContainerItem` deleted or renamed its instance variable `lease_state` + - Model `ListContainerItem` deleted or renamed its instance variable 
`lease_duration` + - Model `ListContainerItem` deleted or renamed its instance variable `metadata` + - Model `ListContainerItem` deleted or renamed its instance variable `immutability_policy` + - Model `ListContainerItem` deleted or renamed its instance variable `legal_hold` + - Model `ListContainerItem` deleted or renamed its instance variable `has_legal_hold` + - Model `ListContainerItem` deleted or renamed its instance variable `has_immutability_policy` + - Model `ListContainerItem` deleted or renamed its instance variable `immutable_storage_with_versioning` + - Model `ListContainerItem` deleted or renamed its instance variable `enable_nfs_v3_root_squash` + - Model `ListContainerItem` deleted or renamed its instance variable `enable_nfs_v3_all_squash` + - Model `ListQueue` deleted or renamed its instance variable `metadata` + - Model `Operation` deleted or renamed its instance variable `service_specification` + - Deleted or renamed enum value `ProvisioningState.ACCEPTED` + - Deleted or renamed enum value `ProvisioningState.CANCELED` + - Deleted or renamed enum value `ProvisioningState.DELETING` + - Deleted or renamed enum value `ProvisioningState.FAILED` + - Deleted or renamed enum value `ProvisioningState.VALIDATE_SUBSCRIPTION_QUOTA_BEGIN` + - Deleted or renamed enum value `ProvisioningState.VALIDATE_SUBSCRIPTION_QUOTA_END` + - Model `QueueServiceProperties` deleted or renamed its instance variable `cors` + - Model `Restriction` deleted or renamed its instance variable `values` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `allowed_copy_scope` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `public_network_access` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `sas_policy` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `key_policy` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable 
`custom_domain` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `encryption` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `network_rule_set` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `access_tier` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `azure_files_identity_based_authentication` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `enable_https_traffic_only` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `is_sftp_enabled` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `is_local_user_enabled` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `enable_extended_groups` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `is_hns_enabled` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `large_file_shares_state` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `routing_preference` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `dual_stack_endpoint_preference` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `allow_blob_public_access` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `minimum_tls_version` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `allow_shared_key_access` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `enable_nfs_v3` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `allow_cross_tenant_replication` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `default_to_o_auth_authentication` + - Model 
`StorageAccountCreateParameters` deleted or renamed its instance variable `immutable_storage_with_versioning` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `dns_endpoint_type` + - Model `StorageAccountCreateParameters` deleted or renamed its instance variable `geo_priority_replication_status` + - Model `StorageAccountListKeysResult` deleted or renamed its instance variable `keys` + - Model `StorageAccountMigration` deleted or renamed its instance variable `target_sku_name` + - Model `StorageAccountMigration` deleted or renamed its instance variable `migration_status` + - Model `StorageAccountMigration` deleted or renamed its instance variable `migration_failed_reason` + - Model `StorageAccountMigration` deleted or renamed its instance variable `migration_failed_detailed_reason` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `custom_domain` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `encryption` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `sas_policy` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `key_policy` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `access_tier` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `azure_files_identity_based_authentication` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `enable_https_traffic_only` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `is_sftp_enabled` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `is_local_user_enabled` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `enable_extended_groups` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `network_rule_set` + - Model 
`StorageAccountUpdateParameters` deleted or renamed its instance variable `large_file_shares_state` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `routing_preference` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `dual_stack_endpoint_preference` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `allow_blob_public_access` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `minimum_tls_version` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `allow_shared_key_access` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `allow_cross_tenant_replication` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `default_to_o_auth_authentication` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `public_network_access` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `immutable_storage_with_versioning` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `allowed_copy_scope` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `dns_endpoint_type` + - Model `StorageAccountUpdateParameters` deleted or renamed its instance variable `geo_priority_replication_status` + - Model `StorageQueue` deleted or renamed its instance variable `metadata` + - Model `StorageQueue` deleted or renamed its instance variable `approximate_message_count` + - Model `Table` deleted or renamed its instance variable `table_name` + - Model `Table` deleted or renamed its instance variable `signed_identifiers` + - Model `TableServiceProperties` deleted or renamed its instance variable `cors` + - Model `UpdateHistoryProperty` deleted or renamed its instance variable `update` + - Deleted or renamed model `BlobServiceItems` + - Deleted or 
renamed model `FileServiceUsages` + - Deleted or renamed model `FileShareItems` + - Deleted or renamed model `LeaseContainerRequestEnum` + - Deleted or renamed model `ListBlobInventoryPolicy` + - Deleted or renamed model `ListContainerItems` + - Deleted or renamed model `ListQueueResource` + - Deleted or renamed model `ListTableResource` + - Deleted or renamed model `LocalUsers` + - Deleted or renamed model `NetworkSecurityPerimeterConfigurationList` + - Deleted or renamed model `ObjectReplicationPolicies` + - Deleted or renamed model `ProxyResourceAutoGenerated` + - Deleted or renamed model `Resource` + - Deleted or renamed model `StorageTaskAssignmentsList` + - Deleted or renamed model `StorageTaskReportSummary` + - Method `BlobContainersOperations.create_or_update_immutability_policy` deleted or renamed its parameter `if_match` of kind `positional_or_keyword` + - Method `BlobContainersOperations.delete_immutability_policy` deleted or renamed its parameter `if_match` of kind `positional_or_keyword` + - Method `BlobContainersOperations.extend_immutability_policy` deleted or renamed its parameter `if_match` of kind `positional_or_keyword` + - Method `BlobContainersOperations.get_immutability_policy` deleted or renamed its parameter `if_match` of kind `positional_or_keyword` + - Method `BlobContainersOperations.list` changed its parameter `include` from `positional_or_keyword` to `keyword_only` + - Method `BlobContainersOperations.lock_immutability_policy` deleted or renamed its parameter `if_match` of kind `positional_or_keyword` + - Method `EncryptionScopesOperations.list` changed its parameter `include` from `positional_or_keyword` to `keyword_only` + - Method `FileSharesOperations.create` changed its parameter `expand` from `positional_or_keyword` to `keyword_only` + - Method `FileSharesOperations.delete` changed its parameter `x_ms_snapshot` from `positional_or_keyword` to `keyword_only` + - Method `FileSharesOperations.delete` changed its parameter `include` 
from `positional_or_keyword` to `keyword_only` + - Method `FileSharesOperations.get` changed its parameter `expand` from `positional_or_keyword` to `keyword_only` + - Method `FileSharesOperations.get` changed its parameter `x_ms_snapshot` from `positional_or_keyword` to `keyword_only` + - Method `FileSharesOperations.lease` changed its parameter `x_ms_snapshot` from `positional_or_keyword` to `keyword_only` + - Method `FileSharesOperations.list` changed its parameter `expand` from `positional_or_keyword` to `keyword_only` + - Method `LocalUsersOperations.list` changed its parameter `include` from `positional_or_keyword` to `keyword_only` + - Method `StorageAccountsOperations.begin_failover` changed its parameter `failover_type` from `positional_or_keyword` to `keyword_only` + - Method `StorageAccountsOperations.begin_hierarchical_namespace_migration` changed its parameter `request_type` from `positional_or_keyword` to `keyword_only` + - Method `StorageAccountsOperations.get_properties` changed its parameter `expand` from `positional_or_keyword` to `keyword_only` + - Method `StorageAccountsOperations.list_keys` changed its parameter `expand` from `positional_or_keyword` to `keyword_only` + - Method `FileSharesOperations.lease` re-ordered its parameters from `['self', 'resource_group_name', 'account_name', 'share_name', 'x_ms_snapshot', 'parameters', 'kwargs']` to `['self', 'resource_group_name', 'account_name', 'share_name', 'parameters', 'x_ms_snapshot', 'kwargs']` + ## 24.0.1 (2026-03-24) ### Other Changes diff --git a/sdk/storage/azure-mgmt-storage/README.md b/sdk/storage/azure-mgmt-storage/README.md index fc1642c658d6..230f48acc2c9 100644 --- a/sdk/storage/azure-mgmt-storage/README.md +++ b/sdk/storage/azure-mgmt-storage/README.md @@ -1,7 +1,7 @@ # Microsoft Azure SDK for Python This is the Microsoft Azure Storage Management Client Library. -This package has been tested with Python 3.9+. +This package has been tested with Python 3.10+. 
For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). ## _Disclaimer_ @@ -12,7 +12,7 @@ _Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For ### Prerequisites -- Python 3.9+ is required to use this package. +- Python 3.10+ is required to use this package. - [Azure subscription](https://azure.microsoft.com/free/) ### Install the package diff --git a/sdk/storage/azure-mgmt-storage/_metadata.json b/sdk/storage/azure-mgmt-storage/_metadata.json index f56df5c02e37..c93f6e2bb3f9 100644 --- a/sdk/storage/azure-mgmt-storage/_metadata.json +++ b/sdk/storage/azure-mgmt-storage/_metadata.json @@ -1,11 +1,10 @@ { - "commit": "ef72f8c9c0b8ef57b1ea289f565751668e314cc7", + "apiVersion": "2025-08-01", + "apiVersions": { + "Microsoft.Storage": "2025-08-01" + }, + "commit": "7368cdcd5be76911942ac8efd7f7abe060879b42", "repository_url": "https://github.com/Azure/azure-rest-api-specs", - "autorest": "3.10.2", - "use": [ - "@autorest/python@6.50.0", - "@autorest/modelerfour@4.27.0" - ], - "autorest_command": "autorest specification/storage/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --package-mode=azure-mgmt --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.50.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", - "readme": "specification/storage/resource-manager/readme.md" + "typespec_src": "specification/storage/Storage.Management", + "emitterVersion": "0.61.3" } \ No newline at end of file diff --git a/sdk/storage/azure-mgmt-storage/apiview-properties.json b/sdk/storage/azure-mgmt-storage/apiview-properties.json index 68140f99ded6..b35656b47a11 100644 --- a/sdk/storage/azure-mgmt-storage/apiview-properties.json +++ b/sdk/storage/azure-mgmt-storage/apiview-properties.json @@ -1,495 +1,550 @@ { - "CrossLanguagePackageId": null, + "CrossLanguagePackageId": 
"Microsoft.Storage", "CrossLanguageDefinitionId": { - "azure.mgmt.storage.models.AccessPolicy": null, - "azure.mgmt.storage.models.AccountImmutabilityPolicyProperties": null, - "azure.mgmt.storage.models.AccountLimits": null, - "azure.mgmt.storage.models.AccountSasParameters": null, - "azure.mgmt.storage.models.AccountUsage": null, - "azure.mgmt.storage.models.AccountUsageElements": null, - "azure.mgmt.storage.models.ActiveDirectoryProperties": null, - "azure.mgmt.storage.models.Resource": null, - "azure.mgmt.storage.models.AzureEntityResource": null, - "azure.mgmt.storage.models.AzureFilesIdentityBasedAuthentication": null, - "azure.mgmt.storage.models.BlobContainer": null, - "azure.mgmt.storage.models.BlobInventoryCreationTime": null, - "azure.mgmt.storage.models.BlobInventoryPolicy": null, - "azure.mgmt.storage.models.BlobInventoryPolicyDefinition": null, - "azure.mgmt.storage.models.BlobInventoryPolicyFilter": null, - "azure.mgmt.storage.models.BlobInventoryPolicyRule": null, - "azure.mgmt.storage.models.BlobInventoryPolicySchema": null, - "azure.mgmt.storage.models.BlobRestoreParameters": null, - "azure.mgmt.storage.models.BlobRestoreRange": null, - "azure.mgmt.storage.models.BlobRestoreStatus": null, - "azure.mgmt.storage.models.BlobServiceItems": null, - "azure.mgmt.storage.models.BlobServiceProperties": null, - "azure.mgmt.storage.models.BurstingConstants": null, - "azure.mgmt.storage.models.ChangeFeed": null, - "azure.mgmt.storage.models.CheckNameAvailabilityResult": null, - "azure.mgmt.storage.models.CloudErrorBody": null, - "azure.mgmt.storage.models.CorsRule": null, - "azure.mgmt.storage.models.CorsRules": null, - "azure.mgmt.storage.models.CustomDomain": null, - "azure.mgmt.storage.models.DateAfterCreation": null, - "azure.mgmt.storage.models.DateAfterModification": null, - "azure.mgmt.storage.models.ProxyResource": null, - "azure.mgmt.storage.models.DeletedAccount": null, - "azure.mgmt.storage.models.DeletedAccountListResult": null, - 
"azure.mgmt.storage.models.DeletedShare": null, - "azure.mgmt.storage.models.DeleteRetentionPolicy": null, - "azure.mgmt.storage.models.Dimension": null, - "azure.mgmt.storage.models.DualStackEndpointPreference": null, - "azure.mgmt.storage.models.Encryption": null, - "azure.mgmt.storage.models.EncryptionIdentity": null, - "azure.mgmt.storage.models.EncryptionInTransit": null, - "azure.mgmt.storage.models.EncryptionScope": null, - "azure.mgmt.storage.models.EncryptionScopeKeyVaultProperties": null, - "azure.mgmt.storage.models.EncryptionScopeListResult": null, - "azure.mgmt.storage.models.EncryptionService": null, - "azure.mgmt.storage.models.EncryptionServices": null, - "azure.mgmt.storage.models.Endpoints": null, - "azure.mgmt.storage.models.ErrorAdditionalInfo": null, - "azure.mgmt.storage.models.ErrorDetail": null, - "azure.mgmt.storage.models.ErrorResponse": null, - "azure.mgmt.storage.models.ErrorResponseAutoGenerated": null, - "azure.mgmt.storage.models.ErrorResponseBody": null, - "azure.mgmt.storage.models.ExecutionTarget": null, - "azure.mgmt.storage.models.ExecutionTrigger": null, - "azure.mgmt.storage.models.ExecutionTriggerUpdate": null, - "azure.mgmt.storage.models.ExtendedLocation": null, - "azure.mgmt.storage.models.FileServiceItems": null, - "azure.mgmt.storage.models.FileServiceProperties": null, - "azure.mgmt.storage.models.FileServiceUsage": null, - "azure.mgmt.storage.models.FileServiceUsageProperties": null, - "azure.mgmt.storage.models.FileServiceUsages": null, - "azure.mgmt.storage.models.FileShare": null, - "azure.mgmt.storage.models.FileShareItem": null, - "azure.mgmt.storage.models.FileShareItems": null, - "azure.mgmt.storage.models.FileShareLimits": null, - "azure.mgmt.storage.models.FileSharePropertiesFileSharePaidBursting": null, - "azure.mgmt.storage.models.FileShareRecommendations": null, - "azure.mgmt.storage.models.GeoPriorityReplicationStatus": null, - "azure.mgmt.storage.models.GeoReplicationStats": null, - 
"azure.mgmt.storage.models.Identity": null, - "azure.mgmt.storage.models.ImmutabilityPolicy": null, - "azure.mgmt.storage.models.ImmutabilityPolicyProperties": null, - "azure.mgmt.storage.models.ImmutableStorageAccount": null, - "azure.mgmt.storage.models.ImmutableStorageWithVersioning": null, - "azure.mgmt.storage.models.IPRule": null, - "azure.mgmt.storage.models.KeyCreationTime": null, - "azure.mgmt.storage.models.KeyPolicy": null, - "azure.mgmt.storage.models.KeyVaultProperties": null, - "azure.mgmt.storage.models.LastAccessTimeTrackingPolicy": null, - "azure.mgmt.storage.models.LeaseContainerRequest": null, - "azure.mgmt.storage.models.LeaseContainerResponse": null, - "azure.mgmt.storage.models.LeaseShareRequest": null, - "azure.mgmt.storage.models.LeaseShareResponse": null, - "azure.mgmt.storage.models.LegalHold": null, - "azure.mgmt.storage.models.LegalHoldProperties": null, - "azure.mgmt.storage.models.ListAccountSasResponse": null, - "azure.mgmt.storage.models.ListBlobInventoryPolicy": null, - "azure.mgmt.storage.models.ListContainerItem": null, - "azure.mgmt.storage.models.ListContainerItems": null, - "azure.mgmt.storage.models.ListQueue": null, - "azure.mgmt.storage.models.ListQueueResource": null, - "azure.mgmt.storage.models.ListQueueServices": null, - "azure.mgmt.storage.models.ListServiceSasResponse": null, - "azure.mgmt.storage.models.ListTableResource": null, - "azure.mgmt.storage.models.ListTableServices": null, - "azure.mgmt.storage.models.LocalUser": null, - "azure.mgmt.storage.models.LocalUserKeys": null, - "azure.mgmt.storage.models.LocalUserRegeneratePasswordResult": null, - "azure.mgmt.storage.models.LocalUsers": null, - "azure.mgmt.storage.models.ManagementPolicy": null, - "azure.mgmt.storage.models.ManagementPolicyAction": null, - "azure.mgmt.storage.models.ManagementPolicyBaseBlob": null, - "azure.mgmt.storage.models.ManagementPolicyDefinition": null, - "azure.mgmt.storage.models.ManagementPolicyFilter": null, - 
"azure.mgmt.storage.models.ManagementPolicyRule": null, - "azure.mgmt.storage.models.ManagementPolicySchema": null, - "azure.mgmt.storage.models.ManagementPolicySnapShot": null, - "azure.mgmt.storage.models.ManagementPolicyVersion": null, - "azure.mgmt.storage.models.MetricSpecification": null, - "azure.mgmt.storage.models.Multichannel": null, - "azure.mgmt.storage.models.NetworkRuleSet": null, - "azure.mgmt.storage.models.NetworkSecurityPerimeter": null, - "azure.mgmt.storage.models.ResourceAutoGenerated": null, - "azure.mgmt.storage.models.ProxyResourceAutoGenerated": null, - "azure.mgmt.storage.models.NetworkSecurityPerimeterConfiguration": null, - "azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationList": null, - "azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationPropertiesProfile": null, - "azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationPropertiesResourceAssociation": null, - "azure.mgmt.storage.models.NfsSetting": null, - "azure.mgmt.storage.models.NspAccessRule": null, - "azure.mgmt.storage.models.NspAccessRuleProperties": null, - "azure.mgmt.storage.models.NspAccessRulePropertiesSubscriptionsItem": null, - "azure.mgmt.storage.models.ObjectReplicationPolicies": null, - "azure.mgmt.storage.models.ObjectReplicationPolicy": null, - "azure.mgmt.storage.models.ObjectReplicationPolicyFilter": null, - "azure.mgmt.storage.models.ObjectReplicationPolicyPropertiesMetrics": null, - "azure.mgmt.storage.models.ObjectReplicationPolicyPropertiesPriorityReplication": null, - "azure.mgmt.storage.models.ObjectReplicationPolicyRule": null, - "azure.mgmt.storage.models.Operation": null, - "azure.mgmt.storage.models.OperationDisplay": null, - "azure.mgmt.storage.models.OperationListResult": null, - "azure.mgmt.storage.models.PermissionScope": null, - "azure.mgmt.storage.models.Placement": null, - "azure.mgmt.storage.models.PrivateEndpoint": null, - "azure.mgmt.storage.models.PrivateEndpointConnection": null, - 
"azure.mgmt.storage.models.PrivateEndpointConnectionListResult": null, - "azure.mgmt.storage.models.PrivateLinkResource": null, - "azure.mgmt.storage.models.PrivateLinkResourceListResult": null, - "azure.mgmt.storage.models.PrivateLinkServiceConnectionState": null, - "azure.mgmt.storage.models.ProtectedAppendWritesHistory": null, - "azure.mgmt.storage.models.ProtocolSettings": null, - "azure.mgmt.storage.models.ProvisioningIssue": null, - "azure.mgmt.storage.models.ProvisioningIssueProperties": null, - "azure.mgmt.storage.models.QueueServiceProperties": null, - "azure.mgmt.storage.models.ResourceAccessRule": null, - "azure.mgmt.storage.models.RestorePolicyProperties": null, - "azure.mgmt.storage.models.Restriction": null, - "azure.mgmt.storage.models.RoutingPreference": null, - "azure.mgmt.storage.models.SasPolicy": null, - "azure.mgmt.storage.models.ServiceSasParameters": null, - "azure.mgmt.storage.models.ServiceSpecification": null, - "azure.mgmt.storage.models.SignedIdentifier": null, - "azure.mgmt.storage.models.Sku": null, - "azure.mgmt.storage.models.SKUCapability": null, - "azure.mgmt.storage.models.SkuInformation": null, - "azure.mgmt.storage.models.SkuInformationLocationInfoItem": null, - "azure.mgmt.storage.models.SmbOAuthSettings": null, - "azure.mgmt.storage.models.SmbSetting": null, - "azure.mgmt.storage.models.SshPublicKey": null, - "azure.mgmt.storage.models.TrackedResource": null, - "azure.mgmt.storage.models.StorageAccount": null, - "azure.mgmt.storage.models.StorageAccountCheckNameAvailabilityParameters": null, - "azure.mgmt.storage.models.StorageAccountCreateParameters": null, - "azure.mgmt.storage.models.StorageAccountInternetEndpoints": null, - "azure.mgmt.storage.models.StorageAccountIpv6Endpoints": null, - "azure.mgmt.storage.models.StorageAccountKey": null, - "azure.mgmt.storage.models.StorageAccountListKeysResult": null, - "azure.mgmt.storage.models.StorageAccountListResult": null, - 
"azure.mgmt.storage.models.StorageAccountMicrosoftEndpoints": null, - "azure.mgmt.storage.models.StorageAccountMigration": null, - "azure.mgmt.storage.models.StorageAccountRegenerateKeyParameters": null, - "azure.mgmt.storage.models.StorageAccountSkuConversionStatus": null, - "azure.mgmt.storage.models.StorageAccountUpdateParameters": null, - "azure.mgmt.storage.models.StorageQueue": null, - "azure.mgmt.storage.models.StorageSkuListResult": null, - "azure.mgmt.storage.models.StorageTaskAssignment": null, - "azure.mgmt.storage.models.StorageTaskAssignmentExecutionContext": null, - "azure.mgmt.storage.models.StorageTaskAssignmentProperties": null, - "azure.mgmt.storage.models.StorageTaskAssignmentReport": null, - "azure.mgmt.storage.models.StorageTaskAssignmentsList": null, - "azure.mgmt.storage.models.StorageTaskAssignmentUpdateExecutionContext": null, - "azure.mgmt.storage.models.StorageTaskAssignmentUpdateParameters": null, - "azure.mgmt.storage.models.StorageTaskAssignmentUpdateProperties": null, - "azure.mgmt.storage.models.StorageTaskAssignmentUpdateReport": null, - "azure.mgmt.storage.models.StorageTaskReportInstance": null, - "azure.mgmt.storage.models.StorageTaskReportProperties": null, - "azure.mgmt.storage.models.StorageTaskReportSummary": null, - "azure.mgmt.storage.models.SystemData": null, - "azure.mgmt.storage.models.Table": null, - "azure.mgmt.storage.models.TableAccessPolicy": null, - "azure.mgmt.storage.models.TableServiceProperties": null, - "azure.mgmt.storage.models.TableSignedIdentifier": null, - "azure.mgmt.storage.models.TagFilter": null, - "azure.mgmt.storage.models.TagProperty": null, - "azure.mgmt.storage.models.TriggerParameters": null, - "azure.mgmt.storage.models.TriggerParametersUpdate": null, - "azure.mgmt.storage.models.UpdateHistoryProperty": null, - "azure.mgmt.storage.models.Usage": null, - "azure.mgmt.storage.models.UsageListResult": null, - "azure.mgmt.storage.models.UsageName": null, - 
"azure.mgmt.storage.models.UserAssignedIdentity": null, - "azure.mgmt.storage.models.VirtualNetworkRule": null, - "azure.mgmt.storage.models.AllowedMethods": null, - "azure.mgmt.storage.models.SkuName": null, - "azure.mgmt.storage.models.SkuTier": null, - "azure.mgmt.storage.models.Name": null, - "azure.mgmt.storage.models.ImmutabilityPolicyUpdateType": null, - "azure.mgmt.storage.models.PublicAccess": null, - "azure.mgmt.storage.models.LeaseStatus": null, - "azure.mgmt.storage.models.LeaseState": null, - "azure.mgmt.storage.models.LeaseDuration": null, - "azure.mgmt.storage.models.ImmutabilityPolicyState": null, - "azure.mgmt.storage.models.MigrationState": null, - "azure.mgmt.storage.models.EnabledProtocols": null, - "azure.mgmt.storage.models.RootSquashType": null, - "azure.mgmt.storage.models.ShareAccessTier": null, - "azure.mgmt.storage.models.ReasonCode": null, - "azure.mgmt.storage.models.Kind": null, - "azure.mgmt.storage.models.State": null, - "azure.mgmt.storage.models.PrivateEndpointServiceConnectionStatus": null, - "azure.mgmt.storage.models.PrivateEndpointConnectionProvisioningState": null, - "azure.mgmt.storage.models.IdentityType": null, - "azure.mgmt.storage.models.ExtendedLocationTypes": null, - "azure.mgmt.storage.models.ZonePlacementPolicy": null, - "azure.mgmt.storage.models.ProvisioningState": null, - "azure.mgmt.storage.models.AccountStatus": null, - "azure.mgmt.storage.models.ExpirationAction": null, - "azure.mgmt.storage.models.KeyType": null, - "azure.mgmt.storage.models.KeySource": null, - "azure.mgmt.storage.models.AccessTier": null, - "azure.mgmt.storage.models.DirectoryServiceOptions": null, - "azure.mgmt.storage.models.AccountType": null, - "azure.mgmt.storage.models.DefaultSharePermission": null, - "azure.mgmt.storage.models.Bypass": null, - "azure.mgmt.storage.models.DefaultAction": null, - "azure.mgmt.storage.models.GeoReplicationStatus": null, - "azure.mgmt.storage.models.PostFailoverRedundancy": null, - 
"azure.mgmt.storage.models.PostPlannedFailoverRedundancy": null, - "azure.mgmt.storage.models.LargeFileSharesState": null, - "azure.mgmt.storage.models.RoutingChoice": null, - "azure.mgmt.storage.models.BlobRestoreProgressStatus": null, - "azure.mgmt.storage.models.MinimumTlsVersion": null, - "azure.mgmt.storage.models.PublicNetworkAccess": null, - "azure.mgmt.storage.models.AccountImmutabilityPolicyState": null, - "azure.mgmt.storage.models.AllowedCopyScope": null, - "azure.mgmt.storage.models.SkuConversionStatus": null, - "azure.mgmt.storage.models.DnsEndpointType": null, - "azure.mgmt.storage.models.KeyPermission": null, - "azure.mgmt.storage.models.UsageUnit": null, - "azure.mgmt.storage.models.RuleType": null, - "azure.mgmt.storage.models.Format": null, - "azure.mgmt.storage.models.Schedule": null, - "azure.mgmt.storage.models.ObjectType": null, - "azure.mgmt.storage.models.CreatedByType": null, - "azure.mgmt.storage.models.InventoryRuleType": null, - "azure.mgmt.storage.models.EncryptionScopeSource": null, - "azure.mgmt.storage.models.EncryptionScopeState": null, - "azure.mgmt.storage.models.IssueType": null, - "azure.mgmt.storage.models.Severity": null, - "azure.mgmt.storage.models.NspAccessRuleDirection": null, - "azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationProvisioningState": null, - "azure.mgmt.storage.models.ResourceAssociationAccessMode": null, - "azure.mgmt.storage.models.TriggerType": null, - "azure.mgmt.storage.models.IntervalUnit": null, - "azure.mgmt.storage.models.RunStatusEnum": null, - "azure.mgmt.storage.models.RunResult": null, - "azure.mgmt.storage.models.ListContainersInclude": null, - "azure.mgmt.storage.models.LeaseContainerRequestEnum": null, - "azure.mgmt.storage.models.LeaseShareAction": null, - "azure.mgmt.storage.models.Services": null, - "azure.mgmt.storage.models.SignedResourceTypes": null, - "azure.mgmt.storage.models.Permissions": null, - "azure.mgmt.storage.models.SignedResource": null, - 
"azure.mgmt.storage.models.MigrationStatus": null, - "azure.mgmt.storage.models.MigrationName": null, - "azure.mgmt.storage.models.ManagementPolicyName": null, - "azure.mgmt.storage.models.BlobInventoryPolicyName": null, - "azure.mgmt.storage.models.ListLocalUserIncludeParam": null, - "azure.mgmt.storage.models.ListEncryptionScopesInclude": null, - "azure.mgmt.storage.models.Reason": null, - "azure.mgmt.storage.models.HttpProtocol": null, - "azure.mgmt.storage.models.StorageAccountExpand": null, - "azure.mgmt.storage.operations.BlobServicesOperations.list": null, - "azure.mgmt.storage.aio.operations.BlobServicesOperations.list": null, - "azure.mgmt.storage.operations.BlobServicesOperations.set_service_properties": null, - "azure.mgmt.storage.aio.operations.BlobServicesOperations.set_service_properties": null, - "azure.mgmt.storage.operations.BlobServicesOperations.get_service_properties": null, - "azure.mgmt.storage.aio.operations.BlobServicesOperations.get_service_properties": null, - "azure.mgmt.storage.operations.BlobContainersOperations.list": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.list": null, - "azure.mgmt.storage.operations.BlobContainersOperations.create": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.create": null, - "azure.mgmt.storage.operations.BlobContainersOperations.update": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.update": null, - "azure.mgmt.storage.operations.BlobContainersOperations.get": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.get": null, - "azure.mgmt.storage.operations.BlobContainersOperations.delete": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.delete": null, - "azure.mgmt.storage.operations.BlobContainersOperations.set_legal_hold": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.set_legal_hold": null, - "azure.mgmt.storage.operations.BlobContainersOperations.clear_legal_hold": null, - 
"azure.mgmt.storage.aio.operations.BlobContainersOperations.clear_legal_hold": null, - "azure.mgmt.storage.operations.BlobContainersOperations.create_or_update_immutability_policy": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.create_or_update_immutability_policy": null, - "azure.mgmt.storage.operations.BlobContainersOperations.get_immutability_policy": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.get_immutability_policy": null, - "azure.mgmt.storage.operations.BlobContainersOperations.delete_immutability_policy": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.delete_immutability_policy": null, - "azure.mgmt.storage.operations.BlobContainersOperations.lock_immutability_policy": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.lock_immutability_policy": null, - "azure.mgmt.storage.operations.BlobContainersOperations.extend_immutability_policy": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.extend_immutability_policy": null, - "azure.mgmt.storage.operations.BlobContainersOperations.lease": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.lease": null, - "azure.mgmt.storage.operations.BlobContainersOperations.begin_object_level_worm": null, - "azure.mgmt.storage.aio.operations.BlobContainersOperations.begin_object_level_worm": null, - "azure.mgmt.storage.operations.FileServicesOperations.list": null, - "azure.mgmt.storage.aio.operations.FileServicesOperations.list": null, - "azure.mgmt.storage.operations.FileServicesOperations.set_service_properties": null, - "azure.mgmt.storage.aio.operations.FileServicesOperations.set_service_properties": null, - "azure.mgmt.storage.operations.FileServicesOperations.get_service_properties": null, - "azure.mgmt.storage.aio.operations.FileServicesOperations.get_service_properties": null, - "azure.mgmt.storage.operations.FileServicesOperations.list_service_usages": null, - 
"azure.mgmt.storage.aio.operations.FileServicesOperations.list_service_usages": null, - "azure.mgmt.storage.operations.FileServicesOperations.get_service_usage": null, - "azure.mgmt.storage.aio.operations.FileServicesOperations.get_service_usage": null, - "azure.mgmt.storage.operations.FileSharesOperations.list": null, - "azure.mgmt.storage.aio.operations.FileSharesOperations.list": null, - "azure.mgmt.storage.operations.FileSharesOperations.create": null, - "azure.mgmt.storage.aio.operations.FileSharesOperations.create": null, - "azure.mgmt.storage.operations.FileSharesOperations.update": null, - "azure.mgmt.storage.aio.operations.FileSharesOperations.update": null, - "azure.mgmt.storage.operations.FileSharesOperations.get": null, - "azure.mgmt.storage.aio.operations.FileSharesOperations.get": null, - "azure.mgmt.storage.operations.FileSharesOperations.delete": null, - "azure.mgmt.storage.aio.operations.FileSharesOperations.delete": null, - "azure.mgmt.storage.operations.FileSharesOperations.restore": null, - "azure.mgmt.storage.aio.operations.FileSharesOperations.restore": null, - "azure.mgmt.storage.operations.FileSharesOperations.lease": null, - "azure.mgmt.storage.aio.operations.FileSharesOperations.lease": null, - "azure.mgmt.storage.operations.QueueServicesOperations.list": null, - "azure.mgmt.storage.aio.operations.QueueServicesOperations.list": null, - "azure.mgmt.storage.operations.QueueServicesOperations.set_service_properties": null, - "azure.mgmt.storage.aio.operations.QueueServicesOperations.set_service_properties": null, - "azure.mgmt.storage.operations.QueueServicesOperations.get_service_properties": null, - "azure.mgmt.storage.aio.operations.QueueServicesOperations.get_service_properties": null, - "azure.mgmt.storage.operations.QueueOperations.create": null, - "azure.mgmt.storage.aio.operations.QueueOperations.create": null, - "azure.mgmt.storage.operations.QueueOperations.update": null, - "azure.mgmt.storage.aio.operations.QueueOperations.update": 
null, - "azure.mgmt.storage.operations.QueueOperations.get": null, - "azure.mgmt.storage.aio.operations.QueueOperations.get": null, - "azure.mgmt.storage.operations.QueueOperations.delete": null, - "azure.mgmt.storage.aio.operations.QueueOperations.delete": null, - "azure.mgmt.storage.operations.QueueOperations.list": null, - "azure.mgmt.storage.aio.operations.QueueOperations.list": null, - "azure.mgmt.storage.operations.Operations.list": null, - "azure.mgmt.storage.aio.operations.Operations.list": null, - "azure.mgmt.storage.operations.SkusOperations.list": null, - "azure.mgmt.storage.aio.operations.SkusOperations.list": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.check_name_availability": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.check_name_availability": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.begin_create": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.begin_create": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.delete": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.delete": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.get_properties": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.get_properties": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.update": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.update": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.list": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.list": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.list_by_resource_group": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.list_by_resource_group": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.list_keys": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.list_keys": null, - 
"azure.mgmt.storage.operations.StorageAccountsOperations.regenerate_key": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.regenerate_key": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.list_account_sas": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.list_account_sas": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.list_service_sas": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.list_service_sas": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.begin_failover": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.begin_failover": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.begin_hierarchical_namespace_migration": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.begin_hierarchical_namespace_migration": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.begin_abort_hierarchical_namespace_migration": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.begin_abort_hierarchical_namespace_migration": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.begin_customer_initiated_migration": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.begin_customer_initiated_migration": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.get_customer_initiated_migration": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.get_customer_initiated_migration": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.begin_restore_blob_ranges": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.begin_restore_blob_ranges": null, - "azure.mgmt.storage.operations.StorageAccountsOperations.revoke_user_delegation_keys": null, - "azure.mgmt.storage.aio.operations.StorageAccountsOperations.revoke_user_delegation_keys": null, - "azure.mgmt.storage.operations.DeletedAccountsOperations.list": null, - 
"azure.mgmt.storage.aio.operations.DeletedAccountsOperations.list": null, - "azure.mgmt.storage.operations.DeletedAccountsOperations.get": null, - "azure.mgmt.storage.aio.operations.DeletedAccountsOperations.get": null, - "azure.mgmt.storage.operations.UsagesOperations.list_by_location": null, - "azure.mgmt.storage.aio.operations.UsagesOperations.list_by_location": null, - "azure.mgmt.storage.operations.ManagementPoliciesOperations.get": null, - "azure.mgmt.storage.aio.operations.ManagementPoliciesOperations.get": null, - "azure.mgmt.storage.operations.ManagementPoliciesOperations.create_or_update": null, - "azure.mgmt.storage.aio.operations.ManagementPoliciesOperations.create_or_update": null, - "azure.mgmt.storage.operations.ManagementPoliciesOperations.delete": null, - "azure.mgmt.storage.aio.operations.ManagementPoliciesOperations.delete": null, - "azure.mgmt.storage.operations.BlobInventoryPoliciesOperations.get": null, - "azure.mgmt.storage.aio.operations.BlobInventoryPoliciesOperations.get": null, - "azure.mgmt.storage.operations.BlobInventoryPoliciesOperations.create_or_update": null, - "azure.mgmt.storage.aio.operations.BlobInventoryPoliciesOperations.create_or_update": null, - "azure.mgmt.storage.operations.BlobInventoryPoliciesOperations.delete": null, - "azure.mgmt.storage.aio.operations.BlobInventoryPoliciesOperations.delete": null, - "azure.mgmt.storage.operations.BlobInventoryPoliciesOperations.list": null, - "azure.mgmt.storage.aio.operations.BlobInventoryPoliciesOperations.list": null, - "azure.mgmt.storage.operations.PrivateEndpointConnectionsOperations.list": null, - "azure.mgmt.storage.aio.operations.PrivateEndpointConnectionsOperations.list": null, - "azure.mgmt.storage.operations.PrivateEndpointConnectionsOperations.get": null, - "azure.mgmt.storage.aio.operations.PrivateEndpointConnectionsOperations.get": null, - "azure.mgmt.storage.operations.PrivateEndpointConnectionsOperations.put": null, - 
"azure.mgmt.storage.aio.operations.PrivateEndpointConnectionsOperations.put": null, - "azure.mgmt.storage.operations.PrivateEndpointConnectionsOperations.delete": null, - "azure.mgmt.storage.aio.operations.PrivateEndpointConnectionsOperations.delete": null, - "azure.mgmt.storage.operations.PrivateLinkResourcesOperations.list_by_storage_account": null, - "azure.mgmt.storage.aio.operations.PrivateLinkResourcesOperations.list_by_storage_account": null, - "azure.mgmt.storage.operations.ObjectReplicationPoliciesOperations.list": null, - "azure.mgmt.storage.aio.operations.ObjectReplicationPoliciesOperations.list": null, - "azure.mgmt.storage.operations.ObjectReplicationPoliciesOperations.get": null, - "azure.mgmt.storage.aio.operations.ObjectReplicationPoliciesOperations.get": null, - "azure.mgmt.storage.operations.ObjectReplicationPoliciesOperations.create_or_update": null, - "azure.mgmt.storage.aio.operations.ObjectReplicationPoliciesOperations.create_or_update": null, - "azure.mgmt.storage.operations.ObjectReplicationPoliciesOperations.delete": null, - "azure.mgmt.storage.aio.operations.ObjectReplicationPoliciesOperations.delete": null, - "azure.mgmt.storage.operations.LocalUsersOperations.list": null, - "azure.mgmt.storage.aio.operations.LocalUsersOperations.list": null, - "azure.mgmt.storage.operations.LocalUsersOperations.get": null, - "azure.mgmt.storage.aio.operations.LocalUsersOperations.get": null, - "azure.mgmt.storage.operations.LocalUsersOperations.create_or_update": null, - "azure.mgmt.storage.aio.operations.LocalUsersOperations.create_or_update": null, - "azure.mgmt.storage.operations.LocalUsersOperations.delete": null, - "azure.mgmt.storage.aio.operations.LocalUsersOperations.delete": null, - "azure.mgmt.storage.operations.LocalUsersOperations.list_keys": null, - "azure.mgmt.storage.aio.operations.LocalUsersOperations.list_keys": null, - "azure.mgmt.storage.operations.LocalUsersOperations.regenerate_password": null, - 
"azure.mgmt.storage.aio.operations.LocalUsersOperations.regenerate_password": null, - "azure.mgmt.storage.operations.EncryptionScopesOperations.put": null, - "azure.mgmt.storage.aio.operations.EncryptionScopesOperations.put": null, - "azure.mgmt.storage.operations.EncryptionScopesOperations.patch": null, - "azure.mgmt.storage.aio.operations.EncryptionScopesOperations.patch": null, - "azure.mgmt.storage.operations.EncryptionScopesOperations.get": null, - "azure.mgmt.storage.aio.operations.EncryptionScopesOperations.get": null, - "azure.mgmt.storage.operations.EncryptionScopesOperations.list": null, - "azure.mgmt.storage.aio.operations.EncryptionScopesOperations.list": null, - "azure.mgmt.storage.operations.TableServicesOperations.list": null, - "azure.mgmt.storage.aio.operations.TableServicesOperations.list": null, - "azure.mgmt.storage.operations.TableServicesOperations.set_service_properties": null, - "azure.mgmt.storage.aio.operations.TableServicesOperations.set_service_properties": null, - "azure.mgmt.storage.operations.TableServicesOperations.get_service_properties": null, - "azure.mgmt.storage.aio.operations.TableServicesOperations.get_service_properties": null, - "azure.mgmt.storage.operations.TableOperations.create": null, - "azure.mgmt.storage.aio.operations.TableOperations.create": null, - "azure.mgmt.storage.operations.TableOperations.update": null, - "azure.mgmt.storage.aio.operations.TableOperations.update": null, - "azure.mgmt.storage.operations.TableOperations.get": null, - "azure.mgmt.storage.aio.operations.TableOperations.get": null, - "azure.mgmt.storage.operations.TableOperations.delete": null, - "azure.mgmt.storage.aio.operations.TableOperations.delete": null, - "azure.mgmt.storage.operations.TableOperations.list": null, - "azure.mgmt.storage.aio.operations.TableOperations.list": null, - "azure.mgmt.storage.operations.NetworkSecurityPerimeterConfigurationsOperations.list": null, - 
"azure.mgmt.storage.aio.operations.NetworkSecurityPerimeterConfigurationsOperations.list": null, - "azure.mgmt.storage.operations.NetworkSecurityPerimeterConfigurationsOperations.get": null, - "azure.mgmt.storage.aio.operations.NetworkSecurityPerimeterConfigurationsOperations.get": null, - "azure.mgmt.storage.operations.NetworkSecurityPerimeterConfigurationsOperations.begin_reconcile": null, - "azure.mgmt.storage.aio.operations.NetworkSecurityPerimeterConfigurationsOperations.begin_reconcile": null, - "azure.mgmt.storage.operations.StorageTaskAssignmentsOperations.begin_create": null, - "azure.mgmt.storage.aio.operations.StorageTaskAssignmentsOperations.begin_create": null, - "azure.mgmt.storage.operations.StorageTaskAssignmentsOperations.begin_update": null, - "azure.mgmt.storage.aio.operations.StorageTaskAssignmentsOperations.begin_update": null, - "azure.mgmt.storage.operations.StorageTaskAssignmentsOperations.get": null, - "azure.mgmt.storage.aio.operations.StorageTaskAssignmentsOperations.get": null, - "azure.mgmt.storage.operations.StorageTaskAssignmentsOperations.begin_delete": null, - "azure.mgmt.storage.aio.operations.StorageTaskAssignmentsOperations.begin_delete": null, - "azure.mgmt.storage.operations.StorageTaskAssignmentsOperations.list": null, - "azure.mgmt.storage.aio.operations.StorageTaskAssignmentsOperations.list": null, - "azure.mgmt.storage.operations.StorageTaskAssignmentsInstancesReportOperations.list": null, - "azure.mgmt.storage.aio.operations.StorageTaskAssignmentsInstancesReportOperations.list": null, - "azure.mgmt.storage.operations.StorageTaskAssignmentInstancesReportOperations.list": null, - "azure.mgmt.storage.aio.operations.StorageTaskAssignmentInstancesReportOperations.list": null + "azure.mgmt.storage.models.AccessPolicy": "Microsoft.Storage.AccessPolicy", + "azure.mgmt.storage.models.AccountImmutabilityPolicyProperties": "Microsoft.Storage.AccountImmutabilityPolicyProperties", + "azure.mgmt.storage.models.AccountLimits": 
"Microsoft.Storage.AccountLimits", + "azure.mgmt.storage.models.AccountSasParameters": "Microsoft.Storage.AccountSasParameters", + "azure.mgmt.storage.models.AccountUsage": "Microsoft.Storage.AccountUsage", + "azure.mgmt.storage.models.AccountUsageElements": "Microsoft.Storage.AccountUsageElements", + "azure.mgmt.storage.models.ActiveDirectoryProperties": "Microsoft.Storage.ActiveDirectoryProperties", + "azure.mgmt.storage.models.ResourceAutoGenerated": "Azure.ResourceManager.CommonTypes.Resource", + "azure.mgmt.storage.models.AzureEntityResource": "Microsoft.Storage.AzureEntityResource", + "azure.mgmt.storage.models.AzureFilesIdentityBasedAuthentication": "Microsoft.Storage.AzureFilesIdentityBasedAuthentication", + "azure.mgmt.storage.models.ProxyResource": "Azure.ResourceManager.CommonTypes.ProxyResource", + "azure.mgmt.storage.models.BlobContainer": "Microsoft.Storage.BlobContainer", + "azure.mgmt.storage.models.BlobInventoryCreationTime": "Microsoft.Storage.BlobInventoryCreationTime", + "azure.mgmt.storage.models.BlobInventoryPolicy": "Microsoft.Storage.BlobInventoryPolicy", + "azure.mgmt.storage.models.BlobInventoryPolicyDefinition": "Microsoft.Storage.BlobInventoryPolicyDefinition", + "azure.mgmt.storage.models.BlobInventoryPolicyFilter": "Microsoft.Storage.BlobInventoryPolicyFilter", + "azure.mgmt.storage.models.BlobInventoryPolicyProperties": "Microsoft.Storage.BlobInventoryPolicyProperties", + "azure.mgmt.storage.models.BlobInventoryPolicyRule": "Microsoft.Storage.BlobInventoryPolicyRule", + "azure.mgmt.storage.models.BlobInventoryPolicySchema": "Microsoft.Storage.BlobInventoryPolicySchema", + "azure.mgmt.storage.models.BlobRestoreParameters": "Microsoft.Storage.BlobRestoreParameters", + "azure.mgmt.storage.models.BlobRestoreRange": "Microsoft.Storage.BlobRestoreRange", + "azure.mgmt.storage.models.BlobRestoreStatus": "Microsoft.Storage.BlobRestoreStatus", + "azure.mgmt.storage.models.BlobServiceProperties": "Microsoft.Storage.BlobServiceProperties", + 
"azure.mgmt.storage.models.BlobServicePropertiesProperties": "Microsoft.Storage.BlobServicePropertiesProperties", + "azure.mgmt.storage.models.BurstingConstants": "Microsoft.Storage.BurstingConstants", + "azure.mgmt.storage.models.ChangeFeed": "Microsoft.Storage.ChangeFeed", + "azure.mgmt.storage.models.CheckNameAvailabilityResult": "Microsoft.Storage.CheckNameAvailabilityResult", + "azure.mgmt.storage.models.CloudError": "Microsoft.Storage.CloudError", + "azure.mgmt.storage.models.CloudErrorBody": "Microsoft.Storage.CloudErrorBody", + "azure.mgmt.storage.models.TrackedResource": "Azure.ResourceManager.CommonTypes.TrackedResource", + "azure.mgmt.storage.models.Connector": "Microsoft.Storage.Connector", + "azure.mgmt.storage.models.ContainerProperties": "Microsoft.Storage.ContainerProperties", + "azure.mgmt.storage.models.CorsRule": "Microsoft.Storage.CorsRule", + "azure.mgmt.storage.models.CorsRules": "Microsoft.Storage.CorsRules", + "azure.mgmt.storage.models.CustomDomain": "Microsoft.Storage.CustomDomain", + "azure.mgmt.storage.models.DataShare": "Microsoft.Storage.DataShare", + "azure.mgmt.storage.models.StorageConnectorConnection": "Microsoft.Storage.StorageConnectorConnection", + "azure.mgmt.storage.models.DataShareConnection": "Microsoft.Storage.DataShareConnection", + "azure.mgmt.storage.models.StorageConnectorSource": "Microsoft.Storage.StorageConnectorSource", + "azure.mgmt.storage.models.DataShareSource": "Microsoft.Storage.DataShareSource", + "azure.mgmt.storage.models.DateAfterCreation": "Microsoft.Storage.DateAfterCreation", + "azure.mgmt.storage.models.DateAfterModification": "Microsoft.Storage.DateAfterModification", + "azure.mgmt.storage.models.DeletedAccount": "Microsoft.Storage.DeletedAccount", + "azure.mgmt.storage.models.DeletedAccountProperties": "Microsoft.Storage.DeletedAccountProperties", + "azure.mgmt.storage.models.DeletedShare": "Microsoft.Storage.DeletedShare", + "azure.mgmt.storage.models.DeleteRetentionPolicy": 
"Microsoft.Storage.DeleteRetentionPolicy", + "azure.mgmt.storage.models.Dimension": "Microsoft.Storage.Dimension", + "azure.mgmt.storage.models.DualStackEndpointPreference": "Microsoft.Storage.DualStackEndpointPreference", + "azure.mgmt.storage.models.Encryption": "Microsoft.Storage.Encryption", + "azure.mgmt.storage.models.EncryptionIdentity": "Microsoft.Storage.EncryptionIdentity", + "azure.mgmt.storage.models.EncryptionInTransit": "Microsoft.Storage.EncryptionInTransit", + "azure.mgmt.storage.models.EncryptionScope": "Microsoft.Storage.EncryptionScope", + "azure.mgmt.storage.models.EncryptionScopeKeyVaultProperties": "Microsoft.Storage.EncryptionScopeKeyVaultProperties", + "azure.mgmt.storage.models.EncryptionScopeProperties": "Microsoft.Storage.EncryptionScopeProperties", + "azure.mgmt.storage.models.EncryptionService": "Microsoft.Storage.EncryptionService", + "azure.mgmt.storage.models.EncryptionServices": "Microsoft.Storage.EncryptionServices", + "azure.mgmt.storage.models.Endpoints": "Microsoft.Storage.Endpoints", + "azure.mgmt.storage.models.ErrorAdditionalInfo": "Azure.ResourceManager.CommonTypes.ErrorAdditionalInfo", + "azure.mgmt.storage.models.ErrorDetail": "Azure.ResourceManager.CommonTypes.ErrorDetail", + "azure.mgmt.storage.models.ErrorResponse": "Microsoft.Storage.ErrorResponse", + "azure.mgmt.storage.models.ErrorResponseAutoGenerated": "Azure.ResourceManager.CommonTypes.ErrorResponse", + "azure.mgmt.storage.models.ErrorResponseBody": "Microsoft.Storage.ErrorResponseBody", + "azure.mgmt.storage.models.ExecutionTarget": "Microsoft.Storage.ExecutionTarget", + "azure.mgmt.storage.models.ExecutionTrigger": "Microsoft.Storage.ExecutionTrigger", + "azure.mgmt.storage.models.ExecutionTriggerUpdate": "Microsoft.Storage.ExecutionTriggerUpdate", + "azure.mgmt.storage.models.ExtendedLocation": "Microsoft.Storage.ExtendedLocation", + "azure.mgmt.storage.models.FileServiceItems": "Microsoft.Storage.FileServiceItems", + 
"azure.mgmt.storage.models.FileServiceProperties": "Microsoft.Storage.FileServiceProperties", + "azure.mgmt.storage.models.FileServicePropertiesProperties": "Microsoft.Storage.FileServicePropertiesProperties", + "azure.mgmt.storage.models.FileServiceUsage": "Microsoft.Storage.FileServiceUsage", + "azure.mgmt.storage.models.FileServiceUsageProperties": "Microsoft.Storage.FileServiceUsageProperties", + "azure.mgmt.storage.models.FileShare": "Microsoft.Storage.FileShare", + "azure.mgmt.storage.models.FileShareItem": "Microsoft.Storage.FileShareItem", + "azure.mgmt.storage.models.FileShareLimits": "Microsoft.Storage.FileShareLimits", + "azure.mgmt.storage.models.FileShareProperties": "Microsoft.Storage.FileShareProperties", + "azure.mgmt.storage.models.FileSharePropertiesFileSharePaidBursting": "Microsoft.Storage.FileSharePropertiesFileSharePaidBursting", + "azure.mgmt.storage.models.FileShareRecommendations": "Microsoft.Storage.FileShareRecommendations", + "azure.mgmt.storage.models.GeoPriorityReplicationStatus": "Microsoft.Storage.GeoPriorityReplicationStatus", + "azure.mgmt.storage.models.GeoReplicationStats": "Microsoft.Storage.GeoReplicationStats", + "azure.mgmt.storage.models.Identity": "Microsoft.Storage.Identity", + "azure.mgmt.storage.models.ImmutabilityPolicy": "Microsoft.Storage.ImmutabilityPolicy", + "azure.mgmt.storage.models.ImmutabilityPolicyProperties": "Microsoft.Storage.ImmutabilityPolicyProperties", + "azure.mgmt.storage.models.ImmutabilityPolicyProperty": "Microsoft.Storage.ImmutabilityPolicyProperty", + "azure.mgmt.storage.models.ImmutableStorageAccount": "Microsoft.Storage.ImmutableStorageAccount", + "azure.mgmt.storage.models.ImmutableStorageWithVersioning": "Microsoft.Storage.ImmutableStorageWithVersioning", + "azure.mgmt.storage.models.IPRule": "Microsoft.Storage.IPRule", + "azure.mgmt.storage.models.KeyCreationTime": "Microsoft.Storage.KeyCreationTime", + "azure.mgmt.storage.models.KeyPolicy": "Microsoft.Storage.KeyPolicy", + 
"azure.mgmt.storage.models.KeyVaultProperties": "Microsoft.Storage.KeyVaultProperties", + "azure.mgmt.storage.models.LastAccessTimeTrackingPolicy": "Microsoft.Storage.LastAccessTimeTrackingPolicy", + "azure.mgmt.storage.models.LeaseContainerRequest": "Microsoft.Storage.LeaseContainerRequest", + "azure.mgmt.storage.models.LeaseContainerResponse": "Microsoft.Storage.LeaseContainerResponse", + "azure.mgmt.storage.models.LeaseShareRequest": "Microsoft.Storage.LeaseShareRequest", + "azure.mgmt.storage.models.LeaseShareResponse": "Microsoft.Storage.LeaseShareResponse", + "azure.mgmt.storage.models.LegalHold": "Microsoft.Storage.LegalHold", + "azure.mgmt.storage.models.LegalHoldProperties": "Microsoft.Storage.LegalHoldProperties", + "azure.mgmt.storage.models.ListAccountSasResponse": "Microsoft.Storage.ListAccountSasResponse", + "azure.mgmt.storage.models.ListContainerItem": "Microsoft.Storage.ListContainerItem", + "azure.mgmt.storage.models.ListQueue": "Microsoft.Storage.ListQueue", + "azure.mgmt.storage.models.ListQueueProperties": "Microsoft.Storage.ListQueueProperties", + "azure.mgmt.storage.models.ListQueueServices": "Microsoft.Storage.ListQueueServices", + "azure.mgmt.storage.models.ListServiceSasResponse": "Microsoft.Storage.ListServiceSasResponse", + "azure.mgmt.storage.models.ListTableServices": "Microsoft.Storage.ListTableServices", + "azure.mgmt.storage.models.LocalUser": "Microsoft.Storage.LocalUser", + "azure.mgmt.storage.models.LocalUserKeys": "Microsoft.Storage.LocalUserKeys", + "azure.mgmt.storage.models.LocalUserProperties": "Microsoft.Storage.LocalUserProperties", + "azure.mgmt.storage.models.LocalUserRegeneratePasswordResult": "Microsoft.Storage.LocalUserRegeneratePasswordResult", + "azure.mgmt.storage.models.StorageConnectorAuthProperties": "Microsoft.Storage.StorageConnectorAuthProperties", + "azure.mgmt.storage.models.ManagedIdentityAuthProperties": "Microsoft.Storage.ManagedIdentityAuthProperties", + "azure.mgmt.storage.models.ManagementPolicy": 
"Microsoft.Storage.ManagementPolicy", + "azure.mgmt.storage.models.ManagementPolicyAction": "Microsoft.Storage.ManagementPolicyAction", + "azure.mgmt.storage.models.ManagementPolicyBaseBlob": "Microsoft.Storage.ManagementPolicyBaseBlob", + "azure.mgmt.storage.models.ManagementPolicyDefinition": "Microsoft.Storage.ManagementPolicyDefinition", + "azure.mgmt.storage.models.ManagementPolicyFilter": "Microsoft.Storage.ManagementPolicyFilter", + "azure.mgmt.storage.models.ManagementPolicyProperties": "Microsoft.Storage.ManagementPolicyProperties", + "azure.mgmt.storage.models.ManagementPolicyRule": "Microsoft.Storage.ManagementPolicyRule", + "azure.mgmt.storage.models.ManagementPolicySchema": "Microsoft.Storage.ManagementPolicySchema", + "azure.mgmt.storage.models.ManagementPolicySnapShot": "Microsoft.Storage.ManagementPolicySnapShot", + "azure.mgmt.storage.models.ManagementPolicyVersion": "Microsoft.Storage.ManagementPolicyVersion", + "azure.mgmt.storage.models.MetricSpecification": "Microsoft.Storage.MetricSpecification", + "azure.mgmt.storage.models.Multichannel": "Microsoft.Storage.Multichannel", + "azure.mgmt.storage.models.NetworkRuleSet": "Microsoft.Storage.NetworkRuleSet", + "azure.mgmt.storage.models.NetworkSecurityPerimeter": "Microsoft.Storage.NetworkSecurityPerimeter", + "azure.mgmt.storage.models.NetworkSecurityPerimeterConfiguration": "Microsoft.Storage.NetworkSecurityPerimeterConfiguration", + "azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationProperties": "Microsoft.Storage.NetworkSecurityPerimeterConfigurationProperties", + "azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationPropertiesProfile": "Microsoft.Storage.NetworkSecurityPerimeterConfigurationPropertiesProfile", + "azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationPropertiesResourceAssociation": "Microsoft.Storage.NetworkSecurityPerimeterConfigurationPropertiesResourceAssociation", + "azure.mgmt.storage.models.NfsSetting": "Microsoft.Storage.NfsSetting", + 
"azure.mgmt.storage.models.NspAccessRule": "Microsoft.Storage.NspAccessRule", + "azure.mgmt.storage.models.NspAccessRuleProperties": "Microsoft.Storage.NspAccessRuleProperties", + "azure.mgmt.storage.models.NspAccessRulePropertiesSubscriptionsItem": "Microsoft.Storage.NspAccessRulePropertiesSubscriptionsItem", + "azure.mgmt.storage.models.ObjectReplicationPolicy": "Microsoft.Storage.ObjectReplicationPolicy", + "azure.mgmt.storage.models.ObjectReplicationPolicyFilter": "Microsoft.Storage.ObjectReplicationPolicyFilter", + "azure.mgmt.storage.models.ObjectReplicationPolicyProperties": "Microsoft.Storage.ObjectReplicationPolicyProperties", + "azure.mgmt.storage.models.ObjectReplicationPolicyPropertiesMetrics": "Microsoft.Storage.ObjectReplicationPolicyPropertiesMetrics", + "azure.mgmt.storage.models.ObjectReplicationPolicyPropertiesPriorityReplication": "Microsoft.Storage.ObjectReplicationPolicyPropertiesPriorityReplication", + "azure.mgmt.storage.models.ObjectReplicationPolicyPropertiesTagsReplication": "Microsoft.Storage.ObjectReplicationPolicyPropertiesTagsReplication", + "azure.mgmt.storage.models.ObjectReplicationPolicyRule": "Microsoft.Storage.ObjectReplicationPolicyRule", + "azure.mgmt.storage.models.Operation": "Microsoft.Storage.Operation", + "azure.mgmt.storage.models.OperationDisplay": "Microsoft.Storage.OperationDisplay", + "azure.mgmt.storage.models.OperationProperties": "Microsoft.Storage.OperationProperties", + "azure.mgmt.storage.models.PermissionScope": "Microsoft.Storage.PermissionScope", + "azure.mgmt.storage.models.Placement": "Microsoft.Storage.Placement", + "azure.mgmt.storage.models.PrivateEndpoint": "Microsoft.Storage.PrivateEndpoint", + "azure.mgmt.storage.models.PrivateEndpointConnection": "Microsoft.Storage.PrivateEndpointConnection", + "azure.mgmt.storage.models.PrivateEndpointConnectionProperties": "Microsoft.Storage.PrivateEndpointConnectionProperties", + "azure.mgmt.storage.models.PrivateLinkResource": 
"Microsoft.Storage.PrivateLinkResource", + "azure.mgmt.storage.models.PrivateLinkResourceListResult": "Microsoft.Storage.PrivateLinkResourceListResult", + "azure.mgmt.storage.models.PrivateLinkResourceProperties": "Microsoft.Storage.PrivateLinkResourceProperties", + "azure.mgmt.storage.models.PrivateLinkServiceConnectionState": "Microsoft.Storage.PrivateLinkServiceConnectionState", + "azure.mgmt.storage.models.ProtectedAppendWritesHistory": "Microsoft.Storage.ProtectedAppendWritesHistory", + "azure.mgmt.storage.models.ProtocolSettings": "Microsoft.Storage.ProtocolSettings", + "azure.mgmt.storage.models.ProvisioningIssue": "Microsoft.Storage.ProvisioningIssue", + "azure.mgmt.storage.models.ProvisioningIssueProperties": "Microsoft.Storage.ProvisioningIssueProperties", + "azure.mgmt.storage.models.QueueProperties": "Microsoft.Storage.QueueProperties", + "azure.mgmt.storage.models.QueueServiceProperties": "Microsoft.Storage.QueueServiceProperties", + "azure.mgmt.storage.models.QueueServicePropertiesProperties": "Microsoft.Storage.QueueServicePropertiesProperties", + "azure.mgmt.storage.models.ResourceAccessRule": "Microsoft.Storage.ResourceAccessRule", + "azure.mgmt.storage.models.RestorePolicyProperties": "Microsoft.Storage.RestorePolicyProperties", + "azure.mgmt.storage.models.Restriction": "Microsoft.Storage.Restriction", + "azure.mgmt.storage.models.RoutingPreference": "Microsoft.Storage.RoutingPreference", + "azure.mgmt.storage.models.SasPolicy": "Microsoft.Storage.SasPolicy", + "azure.mgmt.storage.models.ServiceSasParameters": "Microsoft.Storage.ServiceSasParameters", + "azure.mgmt.storage.models.ServiceSharedKeyAccessProperties": "Microsoft.Storage.ServiceSharedKeyAccessProperties", + "azure.mgmt.storage.models.ServiceSpecification": "Microsoft.Storage.ServiceSpecification", + "azure.mgmt.storage.models.SignedIdentifier": "Microsoft.Storage.SignedIdentifier", + "azure.mgmt.storage.models.Sku": "Microsoft.Storage.Sku", + "azure.mgmt.storage.models.SKUCapability": 
"Microsoft.Storage.SKUCapability", + "azure.mgmt.storage.models.SkuInformation": "Microsoft.Storage.SkuInformation", + "azure.mgmt.storage.models.SkuInformationLocationInfoItem": "Microsoft.Storage.SkuInformationLocationInfoItem", + "azure.mgmt.storage.models.SmbOAuthSettings": "Microsoft.Storage.SmbOAuthSettings", + "azure.mgmt.storage.models.SmbSetting": "Microsoft.Storage.SmbSetting", + "azure.mgmt.storage.models.SshPublicKey": "Microsoft.Storage.SshPublicKey", + "azure.mgmt.storage.models.StaticWebsite": "Microsoft.Storage.StaticWebsite", + "azure.mgmt.storage.models.StorageAccount": "Microsoft.Storage.StorageAccount", + "azure.mgmt.storage.models.StorageAccountCheckNameAvailabilityParameters": "Microsoft.Storage.StorageAccountCheckNameAvailabilityParameters", + "azure.mgmt.storage.models.StorageAccountCreateParameters": "Microsoft.Storage.StorageAccountCreateParameters", + "azure.mgmt.storage.models.StorageAccountInternetEndpoints": "Microsoft.Storage.StorageAccountInternetEndpoints", + "azure.mgmt.storage.models.StorageAccountIpv6Endpoints": "Microsoft.Storage.StorageAccountIpv6Endpoints", + "azure.mgmt.storage.models.StorageAccountKey": "Microsoft.Storage.StorageAccountKey", + "azure.mgmt.storage.models.StorageAccountListKeysResult": "Microsoft.Storage.StorageAccountListKeysResult", + "azure.mgmt.storage.models.StorageAccountMicrosoftEndpoints": "Microsoft.Storage.StorageAccountMicrosoftEndpoints", + "azure.mgmt.storage.models.StorageAccountMigration": "Microsoft.Storage.StorageAccountMigration", + "azure.mgmt.storage.models.StorageAccountMigrationProperties": "Microsoft.Storage.StorageAccountMigrationProperties", + "azure.mgmt.storage.models.StorageAccountProperties": "Microsoft.Storage.StorageAccountProperties", + "azure.mgmt.storage.models.StorageAccountPropertiesCreateParameters": "Microsoft.Storage.StorageAccountPropertiesCreateParameters", + "azure.mgmt.storage.models.StorageAccountPropertiesUpdateParameters": 
"Microsoft.Storage.StorageAccountPropertiesUpdateParameters", + "azure.mgmt.storage.models.StorageAccountRegenerateKeyParameters": "Microsoft.Storage.StorageAccountRegenerateKeyParameters", + "azure.mgmt.storage.models.StorageAccountSharedKeyAccessProperties": "Microsoft.Storage.StorageAccountSharedKeyAccessProperties", + "azure.mgmt.storage.models.StorageAccountSkuConversionStatus": "Microsoft.Storage.StorageAccountSkuConversionStatus", + "azure.mgmt.storage.models.StorageAccountUpdateParameters": "Microsoft.Storage.StorageAccountUpdateParameters", + "azure.mgmt.storage.models.StorageConnectorProperties": "Microsoft.Storage.StorageConnectorProperties", + "azure.mgmt.storage.models.StorageDataCollaborationPolicyProperties": "Microsoft.Storage.StorageDataCollaborationPolicyProperties", + "azure.mgmt.storage.models.StorageDataShareAccessPolicy": "Microsoft.Storage.StorageDataShareAccessPolicy", + "azure.mgmt.storage.models.StorageDataShareAsset": "Microsoft.Storage.StorageDataShareAsset", + "azure.mgmt.storage.models.StorageDataShareProperties": "Microsoft.Storage.StorageDataShareProperties", + "azure.mgmt.storage.models.StorageQueue": "Microsoft.Storage.StorageQueue", + "azure.mgmt.storage.models.StorageTaskAssignment": "Microsoft.Storage.StorageTaskAssignment", + "azure.mgmt.storage.models.StorageTaskAssignmentExecutionContext": "Microsoft.Storage.StorageTaskAssignmentExecutionContext", + "azure.mgmt.storage.models.StorageTaskAssignmentProperties": "Microsoft.Storage.StorageTaskAssignmentProperties", + "azure.mgmt.storage.models.StorageTaskAssignmentReport": "Microsoft.Storage.StorageTaskAssignmentReport", + "azure.mgmt.storage.models.StorageTaskAssignmentUpdateExecutionContext": "Microsoft.Storage.StorageTaskAssignmentUpdateExecutionContext", + "azure.mgmt.storage.models.StorageTaskAssignmentUpdateParameters": "Microsoft.Storage.StorageTaskAssignmentUpdateParameters", + "azure.mgmt.storage.models.StorageTaskAssignmentUpdateProperties": 
"Microsoft.Storage.StorageTaskAssignmentUpdateProperties", + "azure.mgmt.storage.models.StorageTaskAssignmentUpdateReport": "Microsoft.Storage.StorageTaskAssignmentUpdateReport", + "azure.mgmt.storage.models.StorageTaskReportInstance": "Microsoft.Storage.StorageTaskReportInstance", + "azure.mgmt.storage.models.StorageTaskReportProperties": "Microsoft.Storage.StorageTaskReportProperties", + "azure.mgmt.storage.models.SystemData": "Azure.ResourceManager.CommonTypes.SystemData", + "azure.mgmt.storage.models.Table": "Microsoft.Storage.Table", + "azure.mgmt.storage.models.TableAccessPolicy": "Microsoft.Storage.TableAccessPolicy", + "azure.mgmt.storage.models.TableProperties": "Microsoft.Storage.TableProperties", + "azure.mgmt.storage.models.TableServiceProperties": "Microsoft.Storage.TableServiceProperties", + "azure.mgmt.storage.models.TableServicePropertiesProperties": "Microsoft.Storage.TableServicePropertiesProperties", + "azure.mgmt.storage.models.TableSignedIdentifier": "Microsoft.Storage.TableSignedIdentifier", + "azure.mgmt.storage.models.TagFilter": "Microsoft.Storage.TagFilter", + "azure.mgmt.storage.models.TagProperty": "Microsoft.Storage.TagProperty", + "azure.mgmt.storage.models.TestConnectionResponse": "Microsoft.Storage.TestConnectionResponse", + "azure.mgmt.storage.models.TestExistingConnectionRequest": "Microsoft.Storage.TestExistingConnectionRequest", + "azure.mgmt.storage.models.TriggerParameters": "Microsoft.Storage.TriggerParameters", + "azure.mgmt.storage.models.TriggerParametersUpdate": "Microsoft.Storage.TriggerParametersUpdate", + "azure.mgmt.storage.models.UpdateHistoryProperty": "Microsoft.Storage.UpdateHistoryProperty", + "azure.mgmt.storage.models.Usage": "Microsoft.Storage.Usage", + "azure.mgmt.storage.models.UsageName": "Microsoft.Storage.UsageName", + "azure.mgmt.storage.models.UserAssignedIdentity": "Microsoft.Storage.UserAssignedIdentity", + "azure.mgmt.storage.models.VirtualNetworkRule": "Microsoft.Storage.VirtualNetworkRule", + 
"azure.mgmt.storage.models.CreatedByType": "Azure.ResourceManager.CommonTypes.createdByType", + "azure.mgmt.storage.models.PublicAccess": "Microsoft.Storage.PublicAccess", + "azure.mgmt.storage.models.LeaseStatus": "Microsoft.Storage.LeaseStatus", + "azure.mgmt.storage.models.LeaseState": "Microsoft.Storage.LeaseState", + "azure.mgmt.storage.models.LeaseDuration": "Microsoft.Storage.LeaseDuration", + "azure.mgmt.storage.models.ImmutabilityPolicyState": "Microsoft.Storage.ImmutabilityPolicyState", + "azure.mgmt.storage.models.ImmutabilityPolicyUpdateType": "Microsoft.Storage.ImmutabilityPolicyUpdateType", + "azure.mgmt.storage.models.MigrationState": "Microsoft.Storage.MigrationState", + "azure.mgmt.storage.models.LeaseContainerRequestAction": "Microsoft.Storage.LeaseContainerRequestAction", + "azure.mgmt.storage.models.ListContainersInclude": "Microsoft.Storage.ListContainersInclude", + "azure.mgmt.storage.models.AllowedMethods": "Microsoft.Storage.AllowedMethods", + "azure.mgmt.storage.models.Name": "Microsoft.Storage.Name", + "azure.mgmt.storage.models.SkuName": "Microsoft.Storage.SkuName", + "azure.mgmt.storage.models.SkuTier": "Microsoft.Storage.SkuTier", + "azure.mgmt.storage.models.Reason": "Microsoft.Storage.Reason", + "azure.mgmt.storage.models.ProvisioningState": "Microsoft.Storage.ProvisioningState", + "azure.mgmt.storage.models.AccountStatus": "Microsoft.Storage.AccountStatus", + "azure.mgmt.storage.models.ExpirationAction": "Microsoft.Storage.ExpirationAction", + "azure.mgmt.storage.models.KeyType": "Microsoft.Storage.KeyType", + "azure.mgmt.storage.models.KeySource": "Microsoft.Storage.KeySource", + "azure.mgmt.storage.models.AccessTier": "Microsoft.Storage.AccessTier", + "azure.mgmt.storage.models.DirectoryServiceOptions": "Microsoft.Storage.DirectoryServiceOptions", + "azure.mgmt.storage.models.AccountType": "Microsoft.Storage.AccountType", + "azure.mgmt.storage.models.DefaultSharePermission": "Microsoft.Storage.DefaultSharePermission", + 
"azure.mgmt.storage.models.Bypass": "Microsoft.Storage.Bypass", + "azure.mgmt.storage.models.State": "Microsoft.Storage.State", + "azure.mgmt.storage.models.DefaultAction": "Microsoft.Storage.DefaultAction", + "azure.mgmt.storage.models.GeoReplicationStatus": "Microsoft.Storage.GeoReplicationStatus", + "azure.mgmt.storage.models.PostFailoverRedundancy": "Microsoft.Storage.PostFailoverRedundancy", + "azure.mgmt.storage.models.PostPlannedFailoverRedundancy": "Microsoft.Storage.PostPlannedFailoverRedundancy", + "azure.mgmt.storage.models.LargeFileSharesState": "Microsoft.Storage.LargeFileSharesState", + "azure.mgmt.storage.models.PrivateEndpointServiceConnectionStatus": "Microsoft.Storage.PrivateEndpointServiceConnectionStatus", + "azure.mgmt.storage.models.PrivateEndpointConnectionProvisioningState": "Microsoft.Storage.PrivateEndpointConnectionProvisioningState", + "azure.mgmt.storage.models.RoutingChoice": "Microsoft.Storage.RoutingChoice", + "azure.mgmt.storage.models.BlobRestoreProgressStatus": "Microsoft.Storage.BlobRestoreProgressStatus", + "azure.mgmt.storage.models.MinimumTlsVersion": "Microsoft.Storage.MinimumTlsVersion", + "azure.mgmt.storage.models.PublicNetworkAccess": "Microsoft.Storage.PublicNetworkAccess", + "azure.mgmt.storage.models.AccountImmutabilityPolicyState": "Microsoft.Storage.AccountImmutabilityPolicyState", + "azure.mgmt.storage.models.AllowedCopyScope": "Microsoft.Storage.AllowedCopyScope", + "azure.mgmt.storage.models.SkuConversionStatus": "Microsoft.Storage.SkuConversionStatus", + "azure.mgmt.storage.models.DnsEndpointType": "Microsoft.Storage.DnsEndpointType", + "azure.mgmt.storage.models.Kind": "Microsoft.Storage.Kind", + "azure.mgmt.storage.models.IdentityType": "Microsoft.Storage.IdentityType", + "azure.mgmt.storage.models.ExtendedLocationTypes": "Microsoft.Storage.ExtendedLocationTypes", + "azure.mgmt.storage.models.ZonePlacementPolicy": "Microsoft.Storage.ZonePlacementPolicy", + "azure.mgmt.storage.models.StorageAccountExpand": 
"Microsoft.Storage.StorageAccountExpand", + "azure.mgmt.storage.models.KeyPermission": "Microsoft.Storage.KeyPermission", + "azure.mgmt.storage.models.Services": "Microsoft.Storage.Services", + "azure.mgmt.storage.models.SignedResourceTypes": "Microsoft.Storage.SignedResourceTypes", + "azure.mgmt.storage.models.Permissions": "Microsoft.Storage.Permissions", + "azure.mgmt.storage.models.HttpProtocol": "Microsoft.Storage.HttpProtocol", + "azure.mgmt.storage.models.SignedResource": "Microsoft.Storage.SignedResource", + "azure.mgmt.storage.models.MigrationStatus": "Microsoft.Storage.migrationStatus", + "azure.mgmt.storage.models.MigrationName": "Microsoft.Storage.MigrationName", + "azure.mgmt.storage.models.EnabledProtocols": "Microsoft.Storage.EnabledProtocols", + "azure.mgmt.storage.models.RootSquashType": "Microsoft.Storage.RootSquashType", + "azure.mgmt.storage.models.ShareAccessTier": "Microsoft.Storage.ShareAccessTier", + "azure.mgmt.storage.models.LeaseShareAction": "Microsoft.Storage.LeaseShareAction", + "azure.mgmt.storage.models.RuleType": "Microsoft.Storage.RuleType", + "azure.mgmt.storage.models.ManagementPolicyName": "Microsoft.Storage.ManagementPolicyName", + "azure.mgmt.storage.models.InventoryRuleType": "Microsoft.Storage.InventoryRuleType", + "azure.mgmt.storage.models.Format": "Microsoft.Storage.Format", + "azure.mgmt.storage.models.Schedule": "Microsoft.Storage.Schedule", + "azure.mgmt.storage.models.ObjectType": "Microsoft.Storage.ObjectType", + "azure.mgmt.storage.models.BlobInventoryPolicyName": "Microsoft.Storage.BlobInventoryPolicyName", + "azure.mgmt.storage.models.EncryptionScopeSource": "Microsoft.Storage.EncryptionScopeSource", + "azure.mgmt.storage.models.EncryptionScopeState": "Microsoft.Storage.EncryptionScopeState", + "azure.mgmt.storage.models.ListEncryptionScopesInclude": "Microsoft.Storage.ListEncryptionScopesInclude", + "azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationProvisioningState": 
"Microsoft.Storage.NetworkSecurityPerimeterConfigurationProvisioningState", + "azure.mgmt.storage.models.IssueType": "Microsoft.Storage.IssueType", + "azure.mgmt.storage.models.Severity": "Microsoft.Storage.Severity", + "azure.mgmt.storage.models.ResourceAssociationAccessMode": "Microsoft.Storage.ResourceAssociationAccessMode", + "azure.mgmt.storage.models.NspAccessRuleDirection": "Microsoft.Storage.NspAccessRuleDirection", + "azure.mgmt.storage.models.TriggerType": "Microsoft.Storage.TriggerType", + "azure.mgmt.storage.models.IntervalUnit": "Microsoft.Storage.IntervalUnit", + "azure.mgmt.storage.models.StorageTaskAssignmentProvisioningState": "Microsoft.Storage.StorageTaskAssignmentProvisioningState", + "azure.mgmt.storage.models.RunStatusEnum": "Microsoft.Storage.RunStatusEnum", + "azure.mgmt.storage.models.RunResult": "Microsoft.Storage.RunResult", + "azure.mgmt.storage.models.StorageConnectorState": "Microsoft.Storage.StorageConnectorState", + "azure.mgmt.storage.models.StorageConnectorDataSourceType": "Microsoft.Storage.StorageConnectorDataSourceType", + "azure.mgmt.storage.models.StorageConnectorSourceType": "Microsoft.Storage.StorageConnectorSourceType", + "azure.mgmt.storage.models.StorageConnectorConnectionType": "Microsoft.Storage.StorageConnectorConnectionType", + "azure.mgmt.storage.models.StorageConnectorAuthType": "Microsoft.Storage.StorageConnectorAuthType", + "azure.mgmt.storage.models.NativeDataSharingProvisioningState": "Microsoft.Storage.NativeDataSharingProvisioningState", + "azure.mgmt.storage.models.StorageDataShareAccessPolicyPermission": "Microsoft.Storage.StorageDataShareAccessPolicyPermission", + "azure.mgmt.storage.models.ListLocalUserIncludeParam": "Microsoft.Storage.ListLocalUserIncludeParam", + "azure.mgmt.storage.models.ReasonCode": "Microsoft.Storage.ReasonCode", + "azure.mgmt.storage.models.UsageUnit": "Microsoft.Storage.UsageUnit", + "azure.mgmt.storage.operations.Operations.list": "Azure.ResourceManager.Legacy.Operations.list", + 
"azure.mgmt.storage.aio.operations.Operations.list": "Azure.ResourceManager.Legacy.Operations.list", + "azure.mgmt.storage.operations.BlobContainersOperations.get": "Microsoft.Storage.BlobContainers.get", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.get": "Microsoft.Storage.BlobContainers.get", + "azure.mgmt.storage.operations.BlobContainersOperations.create": "Microsoft.Storage.BlobContainers.create", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.create": "Microsoft.Storage.BlobContainers.create", + "azure.mgmt.storage.operations.BlobContainersOperations.update": "Microsoft.Storage.BlobContainers.update", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.update": "Microsoft.Storage.BlobContainers.update", + "azure.mgmt.storage.operations.BlobContainersOperations.delete": "Microsoft.Storage.BlobContainers.delete", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.delete": "Microsoft.Storage.BlobContainers.delete", + "azure.mgmt.storage.operations.BlobContainersOperations.set_legal_hold": "Microsoft.Storage.BlobContainers.setLegalHold", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.set_legal_hold": "Microsoft.Storage.BlobContainers.setLegalHold", + "azure.mgmt.storage.operations.BlobContainersOperations.clear_legal_hold": "Microsoft.Storage.BlobContainers.clearLegalHold", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.clear_legal_hold": "Microsoft.Storage.BlobContainers.clearLegalHold", + "azure.mgmt.storage.operations.BlobContainersOperations.lease": "Microsoft.Storage.BlobContainers.lease", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.lease": "Microsoft.Storage.BlobContainers.lease", + "azure.mgmt.storage.operations.BlobContainersOperations.begin_object_level_worm": "Microsoft.Storage.BlobContainers.objectLevelWorm", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.begin_object_level_worm": "Microsoft.Storage.BlobContainers.objectLevelWorm", + 
"azure.mgmt.storage.operations.BlobContainersOperations.list": "Microsoft.Storage.BlobServices.blobContainersList", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.list": "Microsoft.Storage.BlobServices.blobContainersList", + "azure.mgmt.storage.operations.BlobContainersOperations.get_immutability_policy": "Microsoft.Storage.ImmutabilityPolicies.getImmutabilityPolicy", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.get_immutability_policy": "Microsoft.Storage.ImmutabilityPolicies.getImmutabilityPolicy", + "azure.mgmt.storage.operations.BlobContainersOperations.create_or_update_immutability_policy": "Microsoft.Storage.ImmutabilityPolicies.createOrUpdateImmutabilityPolicy", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.create_or_update_immutability_policy": "Microsoft.Storage.ImmutabilityPolicies.createOrUpdateImmutabilityPolicy", + "azure.mgmt.storage.operations.BlobContainersOperations.delete_immutability_policy": "Microsoft.Storage.ImmutabilityPolicies.deleteImmutabilityPolicy", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.delete_immutability_policy": "Microsoft.Storage.ImmutabilityPolicies.deleteImmutabilityPolicy", + "azure.mgmt.storage.operations.BlobContainersOperations.lock_immutability_policy": "Microsoft.Storage.ImmutabilityPolicies.lockImmutabilityPolicy", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.lock_immutability_policy": "Microsoft.Storage.ImmutabilityPolicies.lockImmutabilityPolicy", + "azure.mgmt.storage.operations.BlobContainersOperations.extend_immutability_policy": "Microsoft.Storage.ImmutabilityPolicies.extendImmutabilityPolicy", + "azure.mgmt.storage.aio.operations.BlobContainersOperations.extend_immutability_policy": "Microsoft.Storage.ImmutabilityPolicies.extendImmutabilityPolicy", + "azure.mgmt.storage.operations.BlobServicesOperations.get_service_properties": "Microsoft.Storage.BlobServices.getServiceProperties", + 
"azure.mgmt.storage.aio.operations.BlobServicesOperations.get_service_properties": "Microsoft.Storage.BlobServices.getServiceProperties", + "azure.mgmt.storage.operations.BlobServicesOperations.set_service_properties": "Microsoft.Storage.BlobServices.setServiceProperties", + "azure.mgmt.storage.aio.operations.BlobServicesOperations.set_service_properties": "Microsoft.Storage.BlobServices.setServiceProperties", + "azure.mgmt.storage.operations.BlobServicesOperations.list": "Microsoft.Storage.BlobServices.list", + "azure.mgmt.storage.aio.operations.BlobServicesOperations.list": "Microsoft.Storage.BlobServices.list", + "azure.mgmt.storage.operations.StorageAccountsOperations.check_name_availability": "Microsoft.Storage.StorageAccounts.checkNameAvailability", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.check_name_availability": "Microsoft.Storage.StorageAccounts.checkNameAvailability", + "azure.mgmt.storage.operations.StorageAccountsOperations.get_properties": "Microsoft.Storage.StorageAccounts.getProperties", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.get_properties": "Microsoft.Storage.StorageAccounts.getProperties", + "azure.mgmt.storage.operations.StorageAccountsOperations.begin_create": "Microsoft.Storage.StorageAccounts.create", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.begin_create": "Microsoft.Storage.StorageAccounts.create", + "azure.mgmt.storage.operations.StorageAccountsOperations.update": "Microsoft.Storage.StorageAccounts.update", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.update": "Microsoft.Storage.StorageAccounts.update", + "azure.mgmt.storage.operations.StorageAccountsOperations.delete": "Microsoft.Storage.StorageAccounts.delete", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.delete": "Microsoft.Storage.StorageAccounts.delete", + "azure.mgmt.storage.operations.StorageAccountsOperations.list_by_resource_group": 
"Microsoft.Storage.StorageAccounts.listByResourceGroup", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.list_by_resource_group": "Microsoft.Storage.StorageAccounts.listByResourceGroup", + "azure.mgmt.storage.operations.StorageAccountsOperations.list": "Microsoft.Storage.StorageAccounts.list", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.list": "Microsoft.Storage.StorageAccounts.list", + "azure.mgmt.storage.operations.StorageAccountsOperations.list_keys": "Microsoft.Storage.StorageAccounts.listKeys", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.list_keys": "Microsoft.Storage.StorageAccounts.listKeys", + "azure.mgmt.storage.operations.StorageAccountsOperations.regenerate_key": "Microsoft.Storage.StorageAccounts.regenerateKey", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.regenerate_key": "Microsoft.Storage.StorageAccounts.regenerateKey", + "azure.mgmt.storage.operations.StorageAccountsOperations.list_account_sas": "Microsoft.Storage.StorageAccounts.listAccountSAS", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.list_account_sas": "Microsoft.Storage.StorageAccounts.listAccountSAS", + "azure.mgmt.storage.operations.StorageAccountsOperations.list_service_sas": "Microsoft.Storage.StorageAccounts.listServiceSAS", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.list_service_sas": "Microsoft.Storage.StorageAccounts.listServiceSAS", + "azure.mgmt.storage.operations.StorageAccountsOperations.begin_failover": "Microsoft.Storage.StorageAccounts.failover", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.begin_failover": "Microsoft.Storage.StorageAccounts.failover", + "azure.mgmt.storage.operations.StorageAccountsOperations.begin_hierarchical_namespace_migration": "Microsoft.Storage.StorageAccounts.hierarchicalNamespaceMigration", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.begin_hierarchical_namespace_migration": 
"Microsoft.Storage.StorageAccounts.hierarchicalNamespaceMigration", + "azure.mgmt.storage.operations.StorageAccountsOperations.begin_abort_hierarchical_namespace_migration": "Microsoft.Storage.StorageAccounts.abortHierarchicalNamespaceMigration", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.begin_abort_hierarchical_namespace_migration": "Microsoft.Storage.StorageAccounts.abortHierarchicalNamespaceMigration", + "azure.mgmt.storage.operations.StorageAccountsOperations.begin_customer_initiated_migration": "Microsoft.Storage.StorageAccounts.customerInitiatedMigration", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.begin_customer_initiated_migration": "Microsoft.Storage.StorageAccounts.customerInitiatedMigration", + "azure.mgmt.storage.operations.StorageAccountsOperations.begin_restore_blob_ranges": "Microsoft.Storage.StorageAccounts.restoreBlobRanges", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.begin_restore_blob_ranges": "Microsoft.Storage.StorageAccounts.restoreBlobRanges", + "azure.mgmt.storage.operations.StorageAccountsOperations.revoke_user_delegation_keys": "Microsoft.Storage.StorageAccounts.revokeUserDelegationKeys", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.revoke_user_delegation_keys": "Microsoft.Storage.StorageAccounts.revokeUserDelegationKeys", + "azure.mgmt.storage.operations.StorageAccountsOperations.get_customer_initiated_migration": "Microsoft.Storage.StorageAccountMigrations.getCustomerInitiatedMigration", + "azure.mgmt.storage.aio.operations.StorageAccountsOperations.get_customer_initiated_migration": "Microsoft.Storage.StorageAccountMigrations.getCustomerInitiatedMigration", + "azure.mgmt.storage.operations.FileSharesOperations.get": "Microsoft.Storage.FileShares.get", + "azure.mgmt.storage.aio.operations.FileSharesOperations.get": "Microsoft.Storage.FileShares.get", + "azure.mgmt.storage.operations.FileSharesOperations.create": "Microsoft.Storage.FileShares.create", + 
"azure.mgmt.storage.aio.operations.FileSharesOperations.create": "Microsoft.Storage.FileShares.create", + "azure.mgmt.storage.operations.FileSharesOperations.update": "Microsoft.Storage.FileShares.update", + "azure.mgmt.storage.aio.operations.FileSharesOperations.update": "Microsoft.Storage.FileShares.update", + "azure.mgmt.storage.operations.FileSharesOperations.delete": "Microsoft.Storage.FileShares.delete", + "azure.mgmt.storage.aio.operations.FileSharesOperations.delete": "Microsoft.Storage.FileShares.delete", + "azure.mgmt.storage.operations.FileSharesOperations.restore": "Microsoft.Storage.FileShares.restore", + "azure.mgmt.storage.aio.operations.FileSharesOperations.restore": "Microsoft.Storage.FileShares.restore", + "azure.mgmt.storage.operations.FileSharesOperations.lease": "Microsoft.Storage.FileShares.lease", + "azure.mgmt.storage.aio.operations.FileSharesOperations.lease": "Microsoft.Storage.FileShares.lease", + "azure.mgmt.storage.operations.FileSharesOperations.list": "Microsoft.Storage.FileServices.fileSharesList", + "azure.mgmt.storage.aio.operations.FileSharesOperations.list": "Microsoft.Storage.FileServices.fileSharesList", + "azure.mgmt.storage.operations.FileServicesOperations.get_service_properties": "Microsoft.Storage.FileServices.getServiceProperties", + "azure.mgmt.storage.aio.operations.FileServicesOperations.get_service_properties": "Microsoft.Storage.FileServices.getServiceProperties", + "azure.mgmt.storage.operations.FileServicesOperations.set_service_properties": "Microsoft.Storage.FileServices.setServiceProperties", + "azure.mgmt.storage.aio.operations.FileServicesOperations.set_service_properties": "Microsoft.Storage.FileServices.setServiceProperties", + "azure.mgmt.storage.operations.FileServicesOperations.list": "Microsoft.Storage.FileServices.list", + "azure.mgmt.storage.aio.operations.FileServicesOperations.list": "Microsoft.Storage.FileServices.list", + "azure.mgmt.storage.operations.FileServicesOperations.get_service_usage": 
"Microsoft.Storage.FileServiceUsageOperationGroup.getServiceUsage", + "azure.mgmt.storage.aio.operations.FileServicesOperations.get_service_usage": "Microsoft.Storage.FileServiceUsageOperationGroup.getServiceUsage", + "azure.mgmt.storage.operations.FileServicesOperations.list_service_usages": "Microsoft.Storage.FileServiceUsageOperationGroup.listServiceUsages", + "azure.mgmt.storage.aio.operations.FileServicesOperations.list_service_usages": "Microsoft.Storage.FileServiceUsageOperationGroup.listServiceUsages", + "azure.mgmt.storage.operations.QueueServicesOperations.get_service_properties": "Microsoft.Storage.QueueServices.getServiceProperties", + "azure.mgmt.storage.aio.operations.QueueServicesOperations.get_service_properties": "Microsoft.Storage.QueueServices.getServiceProperties", + "azure.mgmt.storage.operations.QueueServicesOperations.set_service_properties": "Microsoft.Storage.QueueServices.setServiceProperties", + "azure.mgmt.storage.aio.operations.QueueServicesOperations.set_service_properties": "Microsoft.Storage.QueueServices.setServiceProperties", + "azure.mgmt.storage.operations.QueueServicesOperations.list": "Microsoft.Storage.QueueServices.list", + "azure.mgmt.storage.aio.operations.QueueServicesOperations.list": "Microsoft.Storage.QueueServices.list", + "azure.mgmt.storage.operations.DeletedAccountsOperations.get": "Microsoft.Storage.DeletedAccounts.get", + "azure.mgmt.storage.aio.operations.DeletedAccountsOperations.get": "Microsoft.Storage.DeletedAccounts.get", + "azure.mgmt.storage.operations.DeletedAccountsOperations.list": "Microsoft.Storage.DeletedAccountsOperationGroup.list", + "azure.mgmt.storage.aio.operations.DeletedAccountsOperations.list": "Microsoft.Storage.DeletedAccountsOperationGroup.list", + "azure.mgmt.storage.operations.ManagementPoliciesOperations.get": "Microsoft.Storage.ManagementPolicies.get", + "azure.mgmt.storage.aio.operations.ManagementPoliciesOperations.get": "Microsoft.Storage.ManagementPolicies.get", + 
"azure.mgmt.storage.operations.ManagementPoliciesOperations.create_or_update": "Microsoft.Storage.ManagementPolicies.createOrUpdate", + "azure.mgmt.storage.aio.operations.ManagementPoliciesOperations.create_or_update": "Microsoft.Storage.ManagementPolicies.createOrUpdate", + "azure.mgmt.storage.operations.ManagementPoliciesOperations.delete": "Microsoft.Storage.ManagementPolicies.delete", + "azure.mgmt.storage.aio.operations.ManagementPoliciesOperations.delete": "Microsoft.Storage.ManagementPolicies.delete", + "azure.mgmt.storage.operations.BlobInventoryPoliciesOperations.get": "Microsoft.Storage.BlobInventoryPolicies.get", + "azure.mgmt.storage.aio.operations.BlobInventoryPoliciesOperations.get": "Microsoft.Storage.BlobInventoryPolicies.get", + "azure.mgmt.storage.operations.BlobInventoryPoliciesOperations.create_or_update": "Microsoft.Storage.BlobInventoryPolicies.createOrUpdate", + "azure.mgmt.storage.aio.operations.BlobInventoryPoliciesOperations.create_or_update": "Microsoft.Storage.BlobInventoryPolicies.createOrUpdate", + "azure.mgmt.storage.operations.BlobInventoryPoliciesOperations.delete": "Microsoft.Storage.BlobInventoryPolicies.delete", + "azure.mgmt.storage.aio.operations.BlobInventoryPoliciesOperations.delete": "Microsoft.Storage.BlobInventoryPolicies.delete", + "azure.mgmt.storage.operations.BlobInventoryPoliciesOperations.list": "Microsoft.Storage.BlobInventoryPolicies.list", + "azure.mgmt.storage.aio.operations.BlobInventoryPoliciesOperations.list": "Microsoft.Storage.BlobInventoryPolicies.list", + "azure.mgmt.storage.operations.PrivateEndpointConnectionsOperations.get": "Microsoft.Storage.PrivateEndpointConnections.get", + "azure.mgmt.storage.aio.operations.PrivateEndpointConnectionsOperations.get": "Microsoft.Storage.PrivateEndpointConnections.get", + "azure.mgmt.storage.operations.PrivateEndpointConnectionsOperations.put": "Microsoft.Storage.PrivateEndpointConnections.put", + 
"azure.mgmt.storage.aio.operations.PrivateEndpointConnectionsOperations.put": "Microsoft.Storage.PrivateEndpointConnections.put", + "azure.mgmt.storage.operations.PrivateEndpointConnectionsOperations.delete": "Microsoft.Storage.PrivateEndpointConnections.delete", + "azure.mgmt.storage.aio.operations.PrivateEndpointConnectionsOperations.delete": "Microsoft.Storage.PrivateEndpointConnections.delete", + "azure.mgmt.storage.operations.PrivateEndpointConnectionsOperations.list": "Microsoft.Storage.PrivateEndpointConnections.list", + "azure.mgmt.storage.aio.operations.PrivateEndpointConnectionsOperations.list": "Microsoft.Storage.PrivateEndpointConnections.list", + "azure.mgmt.storage.operations.EncryptionScopesOperations.get": "Microsoft.Storage.EncryptionScopes.get", + "azure.mgmt.storage.aio.operations.EncryptionScopesOperations.get": "Microsoft.Storage.EncryptionScopes.get", + "azure.mgmt.storage.operations.EncryptionScopesOperations.put": "Microsoft.Storage.EncryptionScopes.put", + "azure.mgmt.storage.aio.operations.EncryptionScopesOperations.put": "Microsoft.Storage.EncryptionScopes.put", + "azure.mgmt.storage.operations.EncryptionScopesOperations.patch": "Microsoft.Storage.EncryptionScopes.patch", + "azure.mgmt.storage.aio.operations.EncryptionScopesOperations.patch": "Microsoft.Storage.EncryptionScopes.patch", + "azure.mgmt.storage.operations.EncryptionScopesOperations.list": "Microsoft.Storage.EncryptionScopes.list", + "azure.mgmt.storage.aio.operations.EncryptionScopesOperations.list": "Microsoft.Storage.EncryptionScopes.list", + "azure.mgmt.storage.operations.TableServicesOperations.get_service_properties": "Microsoft.Storage.TableServices.getServiceProperties", + "azure.mgmt.storage.aio.operations.TableServicesOperations.get_service_properties": "Microsoft.Storage.TableServices.getServiceProperties", + "azure.mgmt.storage.operations.TableServicesOperations.set_service_properties": "Microsoft.Storage.TableServices.setServiceProperties", + 
"azure.mgmt.storage.aio.operations.TableServicesOperations.set_service_properties": "Microsoft.Storage.TableServices.setServiceProperties", + "azure.mgmt.storage.operations.TableServicesOperations.list": "Microsoft.Storage.TableServices.list", + "azure.mgmt.storage.aio.operations.TableServicesOperations.list": "Microsoft.Storage.TableServices.list", + "azure.mgmt.storage.operations.NetworkSecurityPerimeterConfigurationsOperations.get": "Microsoft.Storage.NetworkSecurityPerimeterConfigurations.get", + "azure.mgmt.storage.aio.operations.NetworkSecurityPerimeterConfigurationsOperations.get": "Microsoft.Storage.NetworkSecurityPerimeterConfigurations.get", + "azure.mgmt.storage.operations.NetworkSecurityPerimeterConfigurationsOperations.list": "Microsoft.Storage.NetworkSecurityPerimeterConfigurations.list", + "azure.mgmt.storage.aio.operations.NetworkSecurityPerimeterConfigurationsOperations.list": "Microsoft.Storage.NetworkSecurityPerimeterConfigurations.list", + "azure.mgmt.storage.operations.NetworkSecurityPerimeterConfigurationsOperations.begin_reconcile": "Microsoft.Storage.NetworkSecurityPerimeterConfigurations.reconcile", + "azure.mgmt.storage.aio.operations.NetworkSecurityPerimeterConfigurationsOperations.begin_reconcile": "Microsoft.Storage.NetworkSecurityPerimeterConfigurations.reconcile", + "azure.mgmt.storage.operations.StorageTaskAssignmentsOperations.get": "Microsoft.Storage.StorageTaskAssignments.get", + "azure.mgmt.storage.aio.operations.StorageTaskAssignmentsOperations.get": "Microsoft.Storage.StorageTaskAssignments.get", + "azure.mgmt.storage.operations.StorageTaskAssignmentsOperations.begin_create": "Microsoft.Storage.StorageTaskAssignments.create", + "azure.mgmt.storage.aio.operations.StorageTaskAssignmentsOperations.begin_create": "Microsoft.Storage.StorageTaskAssignments.create", + "azure.mgmt.storage.operations.StorageTaskAssignmentsOperations.begin_update": "Microsoft.Storage.StorageTaskAssignments.update", + 
"azure.mgmt.storage.aio.operations.StorageTaskAssignmentsOperations.begin_update": "Microsoft.Storage.StorageTaskAssignments.update", + "azure.mgmt.storage.operations.StorageTaskAssignmentsOperations.begin_delete": "Microsoft.Storage.StorageTaskAssignments.delete", + "azure.mgmt.storage.aio.operations.StorageTaskAssignmentsOperations.begin_delete": "Microsoft.Storage.StorageTaskAssignments.delete", + "azure.mgmt.storage.operations.StorageTaskAssignmentsOperations.list": "Microsoft.Storage.StorageTaskAssignments.list", + "azure.mgmt.storage.aio.operations.StorageTaskAssignmentsOperations.list": "Microsoft.Storage.StorageTaskAssignments.list", + "azure.mgmt.storage.operations.StorageTaskAssignmentsOperations.begin_stop_assignment": "Microsoft.Storage.StorageTaskAssignments.stopAssignment", + "azure.mgmt.storage.aio.operations.StorageTaskAssignmentsOperations.begin_stop_assignment": "Microsoft.Storage.StorageTaskAssignments.stopAssignment", + "azure.mgmt.storage.operations.ConnectorsOperations.get": "Microsoft.Storage.Connectors.get", + "azure.mgmt.storage.aio.operations.ConnectorsOperations.get": "Microsoft.Storage.Connectors.get", + "azure.mgmt.storage.operations.ConnectorsOperations.begin_create": "Microsoft.Storage.Connectors.create", + "azure.mgmt.storage.aio.operations.ConnectorsOperations.begin_create": "Microsoft.Storage.Connectors.create", + "azure.mgmt.storage.operations.ConnectorsOperations.begin_update": "Microsoft.Storage.Connectors.update", + "azure.mgmt.storage.aio.operations.ConnectorsOperations.begin_update": "Microsoft.Storage.Connectors.update", + "azure.mgmt.storage.operations.ConnectorsOperations.begin_delete": "Microsoft.Storage.Connectors.delete", + "azure.mgmt.storage.aio.operations.ConnectorsOperations.begin_delete": "Microsoft.Storage.Connectors.delete", + "azure.mgmt.storage.operations.ConnectorsOperations.list_by_storage_account": "Microsoft.Storage.Connectors.listByStorageAccount", + 
"azure.mgmt.storage.aio.operations.ConnectorsOperations.list_by_storage_account": "Microsoft.Storage.Connectors.listByStorageAccount", + "azure.mgmt.storage.operations.ConnectorsOperations.begin_test_existing_connection": "Microsoft.Storage.Connectors.testExistingConnection", + "azure.mgmt.storage.aio.operations.ConnectorsOperations.begin_test_existing_connection": "Microsoft.Storage.Connectors.testExistingConnection", + "azure.mgmt.storage.operations.DataSharesOperations.get": "Microsoft.Storage.DataShares.get", + "azure.mgmt.storage.aio.operations.DataSharesOperations.get": "Microsoft.Storage.DataShares.get", + "azure.mgmt.storage.operations.DataSharesOperations.begin_create": "Microsoft.Storage.DataShares.create", + "azure.mgmt.storage.aio.operations.DataSharesOperations.begin_create": "Microsoft.Storage.DataShares.create", + "azure.mgmt.storage.operations.DataSharesOperations.begin_update": "Microsoft.Storage.DataShares.update", + "azure.mgmt.storage.aio.operations.DataSharesOperations.begin_update": "Microsoft.Storage.DataShares.update", + "azure.mgmt.storage.operations.DataSharesOperations.begin_delete": "Microsoft.Storage.DataShares.delete", + "azure.mgmt.storage.aio.operations.DataSharesOperations.begin_delete": "Microsoft.Storage.DataShares.delete", + "azure.mgmt.storage.operations.DataSharesOperations.list_by_storage_account": "Microsoft.Storage.DataShares.listByStorageAccount", + "azure.mgmt.storage.aio.operations.DataSharesOperations.list_by_storage_account": "Microsoft.Storage.DataShares.listByStorageAccount", + "azure.mgmt.storage.operations.PrivateLinkResourcesOperations.list_by_storage_account": "Microsoft.Storage.StorageAccounts.listByStorageAccount", + "azure.mgmt.storage.aio.operations.PrivateLinkResourcesOperations.list_by_storage_account": "Microsoft.Storage.StorageAccounts.listByStorageAccount", + "azure.mgmt.storage.operations.StorageTaskAssignmentsInstancesReportOperations.list": 
"Microsoft.Storage.StorageAccounts.storageTaskAssignmentsInstancesReportList", + "azure.mgmt.storage.aio.operations.StorageTaskAssignmentsInstancesReportOperations.list": "Microsoft.Storage.StorageAccounts.storageTaskAssignmentsInstancesReportList", + "azure.mgmt.storage.operations.QueueOperations.list": "Microsoft.Storage.QueueServices.queueList", + "azure.mgmt.storage.aio.operations.QueueOperations.list": "Microsoft.Storage.QueueServices.queueList", + "azure.mgmt.storage.operations.QueueOperations.get": "Microsoft.Storage.StorageQueues.get", + "azure.mgmt.storage.aio.operations.QueueOperations.get": "Microsoft.Storage.StorageQueues.get", + "azure.mgmt.storage.operations.QueueOperations.create": "Microsoft.Storage.StorageQueues.create", + "azure.mgmt.storage.aio.operations.QueueOperations.create": "Microsoft.Storage.StorageQueues.create", + "azure.mgmt.storage.operations.QueueOperations.update": "Microsoft.Storage.StorageQueues.update", + "azure.mgmt.storage.aio.operations.QueueOperations.update": "Microsoft.Storage.StorageQueues.update", + "azure.mgmt.storage.operations.QueueOperations.delete": "Microsoft.Storage.StorageQueues.delete", + "azure.mgmt.storage.aio.operations.QueueOperations.delete": "Microsoft.Storage.StorageQueues.delete", + "azure.mgmt.storage.operations.ObjectReplicationPoliciesOperations.get": "Microsoft.Storage.ObjectReplicationPolicyOperationGroup.get", + "azure.mgmt.storage.aio.operations.ObjectReplicationPoliciesOperations.get": "Microsoft.Storage.ObjectReplicationPolicyOperationGroup.get", + "azure.mgmt.storage.operations.ObjectReplicationPoliciesOperations.create_or_update": "Microsoft.Storage.ObjectReplicationPolicyOperationGroup.createOrUpdate", + "azure.mgmt.storage.aio.operations.ObjectReplicationPoliciesOperations.create_or_update": "Microsoft.Storage.ObjectReplicationPolicyOperationGroup.createOrUpdate", + "azure.mgmt.storage.operations.ObjectReplicationPoliciesOperations.delete": 
"Microsoft.Storage.ObjectReplicationPolicyOperationGroup.delete", + "azure.mgmt.storage.aio.operations.ObjectReplicationPoliciesOperations.delete": "Microsoft.Storage.ObjectReplicationPolicyOperationGroup.delete", + "azure.mgmt.storage.operations.ObjectReplicationPoliciesOperations.list": "Microsoft.Storage.ObjectReplicationPolicyOperationGroup.list", + "azure.mgmt.storage.aio.operations.ObjectReplicationPoliciesOperations.list": "Microsoft.Storage.ObjectReplicationPolicyOperationGroup.list", + "azure.mgmt.storage.operations.LocalUsersOperations.get": "Microsoft.Storage.LocalUserOperationGroup.get", + "azure.mgmt.storage.aio.operations.LocalUsersOperations.get": "Microsoft.Storage.LocalUserOperationGroup.get", + "azure.mgmt.storage.operations.LocalUsersOperations.create_or_update": "Microsoft.Storage.LocalUserOperationGroup.createOrUpdate", + "azure.mgmt.storage.aio.operations.LocalUsersOperations.create_or_update": "Microsoft.Storage.LocalUserOperationGroup.createOrUpdate", + "azure.mgmt.storage.operations.LocalUsersOperations.delete": "Microsoft.Storage.LocalUserOperationGroup.delete", + "azure.mgmt.storage.aio.operations.LocalUsersOperations.delete": "Microsoft.Storage.LocalUserOperationGroup.delete", + "azure.mgmt.storage.operations.LocalUsersOperations.list": "Microsoft.Storage.LocalUserOperationGroup.list", + "azure.mgmt.storage.aio.operations.LocalUsersOperations.list": "Microsoft.Storage.LocalUserOperationGroup.list", + "azure.mgmt.storage.operations.LocalUsersOperations.list_keys": "Microsoft.Storage.LocalUserOperationGroup.listKeys", + "azure.mgmt.storage.aio.operations.LocalUsersOperations.list_keys": "Microsoft.Storage.LocalUserOperationGroup.listKeys", + "azure.mgmt.storage.operations.LocalUsersOperations.regenerate_password": "Microsoft.Storage.LocalUserOperationGroup.regeneratePassword", + "azure.mgmt.storage.aio.operations.LocalUsersOperations.regenerate_password": "Microsoft.Storage.LocalUserOperationGroup.regeneratePassword", + 
"azure.mgmt.storage.operations.TableOperations.get": "Microsoft.Storage.Tables.get", + "azure.mgmt.storage.aio.operations.TableOperations.get": "Microsoft.Storage.Tables.get", + "azure.mgmt.storage.operations.TableOperations.create": "Microsoft.Storage.Tables.create", + "azure.mgmt.storage.aio.operations.TableOperations.create": "Microsoft.Storage.Tables.create", + "azure.mgmt.storage.operations.TableOperations.update": "Microsoft.Storage.Tables.update", + "azure.mgmt.storage.aio.operations.TableOperations.update": "Microsoft.Storage.Tables.update", + "azure.mgmt.storage.operations.TableOperations.delete": "Microsoft.Storage.Tables.delete", + "azure.mgmt.storage.aio.operations.TableOperations.delete": "Microsoft.Storage.Tables.delete", + "azure.mgmt.storage.operations.TableOperations.list": "Microsoft.Storage.Tables.list", + "azure.mgmt.storage.aio.operations.TableOperations.list": "Microsoft.Storage.Tables.list", + "azure.mgmt.storage.operations.StorageTaskAssignmentInstancesReportOperations.list": "Microsoft.Storage.StorageTaskAssignments.storageTaskAssignmentInstancesReportList", + "azure.mgmt.storage.aio.operations.StorageTaskAssignmentInstancesReportOperations.list": "Microsoft.Storage.StorageTaskAssignments.storageTaskAssignmentInstancesReportList", + "azure.mgmt.storage.operations.SkusOperations.list": "Microsoft.Storage.SkusOperationGroup.list", + "azure.mgmt.storage.aio.operations.SkusOperations.list": "Microsoft.Storage.SkusOperationGroup.list", + "azure.mgmt.storage.operations.UsagesOperations.list_by_location": "Microsoft.Storage.UsagesOperationGroup.listByLocation", + "azure.mgmt.storage.aio.operations.UsagesOperations.list_by_location": "Microsoft.Storage.UsagesOperationGroup.listByLocation" } } \ No newline at end of file diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/__init__.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/__init__.py index 60980065e1ee..56046e1044e5 100644 --- 
a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/__init__.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/__init__.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -12,7 +12,7 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._storage_management_client import StorageManagementClient # type: ignore +from ._client import StorageManagementClient # type: ignore from ._version import VERSION __version__ = VERSION diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_storage_management_client.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_client.py similarity index 88% rename from sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_storage_management_client.py rename to sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_client.py index a5f90537a85e..a11d635c9390 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_storage_management_client.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_client.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -17,13 +17,14 @@ from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy from azure.mgmt.core.tools import get_arm_endpoints -from . import models as _models from ._configuration import StorageManagementClientConfiguration from ._utils.serialization import Deserializer, Serializer from .operations import ( BlobContainersOperations, BlobInventoryPoliciesOperations, BlobServicesOperations, + ConnectorsOperations, + DataSharesOperations, DeletedAccountsOperations, EncryptionScopesOperations, FileServicesOperations, @@ -55,28 +56,22 @@ class StorageManagementClient: # pylint: disable=too-many-instance-attributes """The Azure Storage Management API. - :ivar blob_services: BlobServicesOperations operations - :vartype blob_services: azure.mgmt.storage.operations.BlobServicesOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.storage.operations.Operations :ivar blob_containers: BlobContainersOperations operations :vartype blob_containers: azure.mgmt.storage.operations.BlobContainersOperations - :ivar file_services: FileServicesOperations operations - :vartype file_services: azure.mgmt.storage.operations.FileServicesOperations + :ivar blob_services: BlobServicesOperations operations + :vartype blob_services: azure.mgmt.storage.operations.BlobServicesOperations + :ivar storage_accounts: StorageAccountsOperations operations + :vartype storage_accounts: azure.mgmt.storage.operations.StorageAccountsOperations :ivar file_shares: FileSharesOperations operations :vartype file_shares: azure.mgmt.storage.operations.FileSharesOperations + :ivar file_services: FileServicesOperations operations + :vartype file_services: azure.mgmt.storage.operations.FileServicesOperations :ivar queue_services: QueueServicesOperations operations :vartype queue_services: azure.mgmt.storage.operations.QueueServicesOperations - :ivar queue: QueueOperations operations - 
:vartype queue: azure.mgmt.storage.operations.QueueOperations - :ivar operations: Operations operations - :vartype operations: azure.mgmt.storage.operations.Operations - :ivar skus: SkusOperations operations - :vartype skus: azure.mgmt.storage.operations.SkusOperations - :ivar storage_accounts: StorageAccountsOperations operations - :vartype storage_accounts: azure.mgmt.storage.operations.StorageAccountsOperations :ivar deleted_accounts: DeletedAccountsOperations operations :vartype deleted_accounts: azure.mgmt.storage.operations.DeletedAccountsOperations - :ivar usages: UsagesOperations operations - :vartype usages: azure.mgmt.storage.operations.UsagesOperations :ivar management_policies: ManagementPoliciesOperations operations :vartype management_policies: azure.mgmt.storage.operations.ManagementPoliciesOperations :ivar blob_inventory_policies: BlobInventoryPoliciesOperations operations @@ -84,19 +79,10 @@ class StorageManagementClient: # pylint: disable=too-many-instance-attributes :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations :vartype private_endpoint_connections: azure.mgmt.storage.operations.PrivateEndpointConnectionsOperations - :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: azure.mgmt.storage.operations.PrivateLinkResourcesOperations - :ivar object_replication_policies: ObjectReplicationPoliciesOperations operations - :vartype object_replication_policies: - azure.mgmt.storage.operations.ObjectReplicationPoliciesOperations - :ivar local_users: LocalUsersOperations operations - :vartype local_users: azure.mgmt.storage.operations.LocalUsersOperations :ivar encryption_scopes: EncryptionScopesOperations operations :vartype encryption_scopes: azure.mgmt.storage.operations.EncryptionScopesOperations :ivar table_services: TableServicesOperations operations :vartype table_services: azure.mgmt.storage.operations.TableServicesOperations - :ivar table: TableOperations 
operations - :vartype table: azure.mgmt.storage.operations.TableOperations :ivar network_security_perimeter_configurations: NetworkSecurityPerimeterConfigurationsOperations operations :vartype network_security_perimeter_configurations: @@ -104,25 +90,45 @@ class StorageManagementClient: # pylint: disable=too-many-instance-attributes :ivar storage_task_assignments: StorageTaskAssignmentsOperations operations :vartype storage_task_assignments: azure.mgmt.storage.operations.StorageTaskAssignmentsOperations + :ivar connectors: ConnectorsOperations operations + :vartype connectors: azure.mgmt.storage.operations.ConnectorsOperations + :ivar data_shares: DataSharesOperations operations + :vartype data_shares: azure.mgmt.storage.operations.DataSharesOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: azure.mgmt.storage.operations.PrivateLinkResourcesOperations :ivar storage_task_assignments_instances_report: StorageTaskAssignmentsInstancesReportOperations operations :vartype storage_task_assignments_instances_report: azure.mgmt.storage.operations.StorageTaskAssignmentsInstancesReportOperations + :ivar queue: QueueOperations operations + :vartype queue: azure.mgmt.storage.operations.QueueOperations + :ivar object_replication_policies: ObjectReplicationPoliciesOperations operations + :vartype object_replication_policies: + azure.mgmt.storage.operations.ObjectReplicationPoliciesOperations + :ivar local_users: LocalUsersOperations operations + :vartype local_users: azure.mgmt.storage.operations.LocalUsersOperations + :ivar table: TableOperations operations + :vartype table: azure.mgmt.storage.operations.TableOperations :ivar storage_task_assignment_instances_report: StorageTaskAssignmentInstancesReportOperations operations :vartype storage_task_assignment_instances_report: azure.mgmt.storage.operations.StorageTaskAssignmentInstancesReportOperations - :param credential: Credential needed for the client to 
connect to Azure. Required. + :ivar skus: SkusOperations operations + :vartype skus: azure.mgmt.storage.operations.SkusOperations + :ivar usages: UsagesOperations operations + :vartype usages: azure.mgmt.storage.operations.UsagesOperations + :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The ID of the target subscription. Required. + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. :type subscription_id: str - :param base_url: Service URL. Default value is None. + :param base_url: Service host. Default value is None. :type base_url: str :keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is None. :paramtype cloud_setting: ~azure.core.AzureClouds - :keyword api_version: Api Version. Default value is "2025-06-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-08-01". + Default value is "2025-08-01". Note that overriding this default value may result in + unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
@@ -137,6 +143,7 @@ def __init__( cloud_setting: Optional["AzureClouds"] = None, **kwargs: Any ) -> None: + _endpoint = "{endpoint}" _cloud = cloud_setting or settings.current.azure_cloud # type: ignore _endpoints = get_arm_endpoints(_cloud) if not base_url: @@ -145,6 +152,7 @@ def __init__( self._config = StorageManagementClientConfiguration( credential=credential, subscription_id=subscription_id, + base_url=cast(str, base_url), cloud_setting=cloud_setting, credential_scopes=credential_scopes, **kwargs @@ -168,27 +176,23 @@ def __init__( policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, self._config.http_logging_policy, ] - self._client: ARMPipelineClient = ARMPipelineClient(base_url=cast(str, base_url), policies=_policies, **kwargs) + self._client: ARMPipelineClient = ARMPipelineClient(base_url=cast(str, _endpoint), policies=_policies, **kwargs) - client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._deserialize = Deserializer(client_models) + self._serialize = Serializer() + self._deserialize = Deserializer() self._serialize.client_side_validation = False - self.blob_services = BlobServicesOperations(self._client, self._config, self._serialize, self._deserialize) - self.blob_containers = BlobContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.file_services = FileServicesOperations(self._client, self._config, self._serialize, self._deserialize) - self.file_shares = FileSharesOperations(self._client, self._config, self._serialize, self._deserialize) - self.queue_services = QueueServicesOperations(self._client, self._config, self._serialize, self._deserialize) - self.queue = QueueOperations(self._client, self._config, self._serialize, self._deserialize) self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) - self.skus = SkusOperations(self._client, self._config, 
self._serialize, self._deserialize) + self.blob_containers = BlobContainersOperations(self._client, self._config, self._serialize, self._deserialize) + self.blob_services = BlobServicesOperations(self._client, self._config, self._serialize, self._deserialize) self.storage_accounts = StorageAccountsOperations( self._client, self._config, self._serialize, self._deserialize ) + self.file_shares = FileSharesOperations(self._client, self._config, self._serialize, self._deserialize) + self.file_services = FileServicesOperations(self._client, self._config, self._serialize, self._deserialize) + self.queue_services = QueueServicesOperations(self._client, self._config, self._serialize, self._deserialize) self.deleted_accounts = DeletedAccountsOperations( self._client, self._config, self._serialize, self._deserialize ) - self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) self.management_policies = ManagementPoliciesOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -198,38 +202,43 @@ def __init__( self.private_endpoint_connections = PrivateEndpointConnectionsOperations( self._client, self._config, self._serialize, self._deserialize ) - self.private_link_resources = PrivateLinkResourcesOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.object_replication_policies = ObjectReplicationPoliciesOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.local_users = LocalUsersOperations(self._client, self._config, self._serialize, self._deserialize) self.encryption_scopes = EncryptionScopesOperations( self._client, self._config, self._serialize, self._deserialize ) self.table_services = TableServicesOperations(self._client, self._config, self._serialize, self._deserialize) - self.table = TableOperations(self._client, self._config, self._serialize, self._deserialize) self.network_security_perimeter_configurations = 
NetworkSecurityPerimeterConfigurationsOperations( self._client, self._config, self._serialize, self._deserialize ) self.storage_task_assignments = StorageTaskAssignmentsOperations( self._client, self._config, self._serialize, self._deserialize ) + self.connectors = ConnectorsOperations(self._client, self._config, self._serialize, self._deserialize) + self.data_shares = DataSharesOperations(self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.storage_task_assignments_instances_report = StorageTaskAssignmentsInstancesReportOperations( self._client, self._config, self._serialize, self._deserialize ) + self.queue = QueueOperations(self._client, self._config, self._serialize, self._deserialize) + self.object_replication_policies = ObjectReplicationPoliciesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.local_users = LocalUsersOperations(self._client, self._config, self._serialize, self._deserialize) + self.table = TableOperations(self._client, self._config, self._serialize, self._deserialize) self.storage_task_assignment_instances_report = StorageTaskAssignmentInstancesReportOperations( self._client, self._config, self._serialize, self._deserialize ) + self.skus = SkusOperations(self._client, self._config, self._serialize, self._deserialize) + self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) - def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. 
>>> from azure.core.rest import HttpRequest >>> request = HttpRequest("GET", "https://www.example.org/") - >>> response = client._send_request(request) + >>> response = client.send_request(request) For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request @@ -242,7 +251,11 @@ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: """ request_copy = deepcopy(request) - request_copy.url = self._client.format_url(request_copy.url) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore def close(self) -> None: diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_configuration.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_configuration.py index df87f9889cb8..c87d9c3ad243 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_configuration.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_configuration.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -24,15 +24,18 @@ class StorageManagementClientConfiguration: # pylint: disable=too-many-instance Note that all parameters used to create this instance are saved as instance attributes. - :param credential: Credential needed for the client to connect to Azure. Required. 
+ :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The ID of the target subscription. Required. + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. :type subscription_id: str + :param base_url: Service host. Default value is "https://management.azure.com". + :type base_url: str :param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is None. :type cloud_setting: ~azure.core.AzureClouds - :keyword api_version: Api Version. Default value is "2025-06-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-08-01". + Default value is "2025-08-01". Note that overriding this default value may result in + unsupported behavior. :paramtype api_version: str """ @@ -40,10 +43,11 @@ def __init__( self, credential: "TokenCredential", subscription_id: str, + base_url: str = "https://management.azure.com", cloud_setting: Optional["AzureClouds"] = None, **kwargs: Any ) -> None: - api_version: str = kwargs.pop("api_version", "2025-06-01") + api_version: str = kwargs.pop("api_version", "2025-08-01") if credential is None: raise ValueError("Parameter 'credential' must not be None.") @@ -52,6 +56,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id + self.base_url = base_url self.cloud_setting = cloud_setting self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_patch.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_patch.py index 8bcb627aa475..87676c65a8f0 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_patch.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_patch.py @@ 
-7,9 +7,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/__init__.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/__init__.py index 0af9b28f6607..8026245c2abc 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/__init__.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/__init__.py @@ -1,6 +1,6 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/model_base.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/model_base.py new file mode 100644 index 000000000000..eef4e52ed1a0 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/model_base.py @@ -0,0 +1,1459 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from collections.abc import MutableMapping +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null +from azure.core.rest import HttpResponse + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") +_NONE_TYPE = type(None) + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S' + + Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython + + :param timedelta td: The timedelta to convert + :rtype: str + :return: ISO8601 version of this timedelta + """ + + # Split seconds to larger units + seconds = td.total_seconds() + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + days, hours = divmod(hours, 24) + + days, hours, minutes = list(map(int, (days, hours, minutes))) + seconds = round(seconds, 6) + + # Build date + date_str = "" + if days: + date_str = "%sD" % days + + if hours or minutes or seconds: + # Build time + time_str = "T" + + # Hours + bigger_exists = date_str or hours + if bigger_exists: + time_str += "{:02}H".format(hours) + + # Minutes + bigger_exists = bigger_exists or minutes + if bigger_exists: + time_str += "{:02}M".format(minutes) + + # Seconds + try: + if seconds.is_integer(): + seconds_string = "{:02}".format(int(seconds)) + else: + # 9 chars long w/ leading 0, 6 digits after decimal + seconds_string = "%09.6f" % seconds + # Remove trailing zeros + seconds_string = seconds_string.rstrip("0") + except AttributeError: # int.is_integer() raises + seconds_string = "{:02}".format(seconds) + + time_str += "{}S".format(seconds_string) + else: + time_str = "" + + return "P" + date_str + time_str + + +def _serialize_bytes(o, format: typing.Optional[str] = None) -> str: + encoded = base64.b64encode(o).decode() + if format == "base64url": + return encoded.strip("=").replace("+", "-").replace("/", "_") + return encoded + + +def _serialize_datetime(o, format: typing.Optional[str] = None): + if hasattr(o, "year") and hasattr(o, "hour"): + if format == "rfc7231": + return email.utils.format_datetime(o, usegmt=True) + if format == "unix-timestamp": + return int(calendar.timegm(o.utctimetuple())) + + # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set) + if not o.tzinfo: + iso_formatted = 
o.replace(tzinfo=TZ_UTC).isoformat() + else: + iso_formatted = o.astimezone(TZ_UTC).isoformat() + # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) + return iso_formatted.replace("+00:00", "Z") + # Next try datetime.date or datetime.time + return o.isoformat() + + +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + +_ARRAY_ENCODE_MAPPING = { + "pipeDelimited": "|", + 
"spaceDelimited": " ", + "commaDelimited": ",", + "newlineDelimited": "\n", +} + + +def _deserialize_array_encoded(delimit: str, attr): + if isinstance(attr, str): + if attr == "": + return [] + return attr.split(delimit) + return attr + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj # type: ignore[no-any-return] + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. 
+ :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. + :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) # type: ignore[no-any-return] + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + 
timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING: + return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format]) + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): + def __init__(self, data: dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + # If this key has been deserialized (for mutable types), we need to handle serialization + if hasattr(self, 
"_attr_to_rest_field"): + cache_attr = f"_deserialized_{key}" + if hasattr(self, cache_attr): + rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key) + if rf: + value = self._data.get(key) + if isinstance(value, (dict, list, set)): + # For mutable types, serialize and return + # But also update _data with serialized form and clear flag + # so mutations via this returned value affect _data + serialized = _serialize(value, rf._format) + # If serialized form is same type (no transformation needed), + # return _data directly so mutations work + if isinstance(serialized, type(value)) and serialized == value: + return self._data.get(key) + # Otherwise return serialized copy and clear flag + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + # Store serialized form back + self._data[key] = serialized + return serialized + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + # Clear any cached deserialized value when setting through dictionary access + cache_attr = f"_deserialized_{key}" + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + """ + :returns: an object providing a view on D's values + :rtype: ~typing.ValuesView + """ + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ 
+ return self._data.items() + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ + return self._data.popitem() + + def clear(self) -> None: + """ + Remove all items from D. + """ + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ + """ + Updates D from mapping/iterable E and F. + :param any args: Either a mapping object or an iterable of key-value pairs. + """ + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... 
# pylint: disable=signature-differs + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. + :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + if isinstance(other, _MyMutableMapping): + return self._data == other._data + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o): + return _ARRAY_ENCODE_MAPPING[format].join(o) + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field(attr_to_rest_field: dict[str, 
"_RestField"], rest_name: str) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: + if isinstance(args[0], ET.Element): + dict_to_pass.update(self._init_from_xml(args[0])) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. 
+ raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def _init_from_xml(self, element: ET.Element) -> dict[str, typing.Any]: + """Deserialize an XML element into a dict mapping rest field names to values. + + :param ET.Element element: The XML element to deserialize from. + :returns: A dictionary of rest_name to deserialized value pairs. + :rtype: dict + """ + result: dict[str, typing.Any] = {} + model_meta = getattr(self, "_xml", {}) + existed_attr_keys: list[str] = [] + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = _resolve_xml_ns(prop_meta, model_meta) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and element.get(xml_name) is not None: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = _deserialize(rf._type, element.get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + _items_name = prop_meta.get("itemsName") + if _items_name: + xml_name = _items_name + _items_ns = prop_meta.get("itemsNs") + if _items_ns is not None: + xml_ns = _items_ns + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = element.findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = _deserialize(rf._type, items) + elif not rf._is_optional: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = [] + continue + + # text element is primitive type + if prop_meta.get("text", False): + if element.text is not None: + result[rf._rest_name] = _deserialize(rf._type, element.text) + 
continue + + # wrapped element could be normal property or array, it should only have one element + item = element.find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in element: + if e.tag not in existed_attr_keys: + result[e.tag] = _convert_element(e) + + return result + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._backcompat_attr_to_rest_field: dict[str, _RestField] = { + Model._get_backcompat_attribute_name(cls._attr_to_rest_field, attr): rf + for attr, rf in cls._attr_to_rest_field.items() + } + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or 
cls.__name__] = cls # type: ignore + + @classmethod + def _get_backcompat_attribute_name(cls, attr_to_rest_field: dict[str, "_RestField"], attr_name: str) -> str: + rest_field_obj = attr_to_rest_field.get(attr_name) # pylint: disable=protected-access + if rest_field_obj is None: + return attr_name + original_tsp_name = getattr(rest_field_obj, "_original_tsp_name", None) # pylint: disable=protected-access + if original_tsp_name: + return original_tsp_name + return attr_name + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = _resolve_xml_ns(prop_meta, model_meta) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: 
_deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: list[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool: + return ( + isinstance(deserializer, functools.partial) + and isinstance(deserializer.args[0], functools.partial) + and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable + ) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + + # encoded string may be deserialized to sequence + if isinstance(obj, str) and isinstance(deserializer, functools.partial): + # for list[str] + if _is_array_encoded_deserializer(deserializer): + return deserializer(obj) + + # for list[Union[...]] + if isinstance(deserializer.args[0], list): + for sub_deserializer in deserializer.args[0]: + if _is_array_encoded_deserializer(sub_deserializer): + return sub_deserializer(obj) + + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? 
+ if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? + try: + if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore + if rf: + rf._is_optional = True + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? 
+    if getattr(annotation, "__origin__", None) is typing.Union:
+        # initial ordering is we make `string` the last deserialization option, because it is often the most generic
+        deserializers = [
+            _get_deserialize_callable_from_annotation(arg, module, rf)
+            for arg in _sorted_annotations(annotation.__args__)  # pyright: ignore
+        ]
+
+        return functools.partial(_deserialize_with_union, deserializers)
+
+    try:
+        annotation_name = (
+            annotation.__name__ if hasattr(annotation, "__name__") else annotation._name  # pyright: ignore
+        )
+        if annotation_name.lower() == "dict":
+            value_deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[1], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(
+                _deserialize_dict,
+                value_deserializer,
+                module,
+            )
+    except (AttributeError, IndexError):
+        pass
+    try:
+        annotation_name = (
+            annotation.__name__ if hasattr(annotation, "__name__") else annotation._name  # pyright: ignore
+        )
+        if annotation_name.lower() in ["list", "set", "tuple", "sequence"]:
+            if len(annotation.__args__) > 1:  # pyright: ignore
+                entry_deserializers = [
+                    _get_deserialize_callable_from_annotation(dt, module, rf)
+                    for dt in annotation.__args__  # pyright: ignore
+                ]
+                return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
+            deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[0], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(_deserialize_sequence, deserializer, module)
+    except (TypeError, IndexError, AttributeError, SyntaxError):
+        pass
+
+    def _deserialize_default(
+        deserializer,
+        obj,
+    ):
+        if obj is None:
+            return obj
+        try:
+            return _deserialize_with_callable(deserializer, obj)
+        except Exception:
+            pass
+        return obj
+
+    if get_deserializer(annotation, rf):
+        return functools.partial(_deserialize_default, get_deserializer(annotation, rf))
+
+    return functools.partial(_deserialize_default, annotation)
+
+
+def _deserialize_with_callable(
+    
deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], + value: typing.Any, +): # pylint: disable=too-many-return-statements + try: + if value is None or isinstance(value, _Null): + return None + if isinstance(value, ET.Element): + if deserializer is str: + return value.text or "" + if deserializer is int: + return int(value.text) if value.text else None + if deserializer is float: + return float(value.text) if value.text else None + if deserializer is bool: + return value.text == "true" if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING.values(): + return deserializer(value.text) if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values(): + return deserializer(value.text) if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value.text if isinstance(value, ET.Element) else value) + except ValueError: + # for unknown value, return raw value + return value.text if isinstance(value, ET.Element) else value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + 
+def _failsafe_deserialize( + deserializer: typing.Any, + response: HttpResponse, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, response.json(), module, rf, format) + except Exception: # pylint: disable=broad-except + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + response: HttpResponse, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, response.text()) + except Exception: # pylint: disable=broad-except + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +# pylint: disable=too-many-instance-attributes +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, + original_tsp_name: typing.Optional[str] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._is_optional = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + self._original_tsp_name = original_tsp_name + + @property + def _class_type(self) -> typing.Any: + result = getattr(self._type, "args", [None])[0] + # type may be wrapped by nested functools.partial so we need to check for that + if 
isinstance(result, functools.partial): + return getattr(result, "args", [None])[0] + return result + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + # Use _data.get() directly to avoid triggering __getitem__ which clears the cache + item = obj._data.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + + # For mutable types, we want mutations to directly affect _data + # Check if we've already deserialized this value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + # Return the value from _data directly (it's been deserialized in place) + return obj._data.get(self._rest_name) + + deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, store the deserialized value back in _data + # so mutations directly affect _data + if isinstance(deserialized, (dict, list, set)): + obj._data[self._rest_name] = deserialized + object.__setattr__(obj, cache_attr, True) # Mark as deserialized + return deserialized + + return deserialized + + def __set__(self, obj: Model, value) -> None: + # Clear the cached deserialized object when setting a new value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + object.__delattr__(obj, cache_attr) + + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def 
_get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, + original_tsp_name: typing.Optional[str] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + original_tsp_name=original_tsp_name, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_xml_ns(meta: dict[str, typing.Any]) -> typing.Optional[str]: + """Return the XML namespace from a metadata dict, checking both 'ns' (old-style) and 'namespace' (DPG) keys. + + :param dict meta: The metadata dictionary to extract namespace from. + :returns: The namespace string if 'ns' or 'namespace' key is present, None otherwise. 
+ :rtype: str or None + """ + ns = meta.get("ns") + if ns is None: + ns = meta.get("namespace") + return ns + + +def _resolve_xml_ns( + prop_meta: dict[str, typing.Any], model_meta: typing.Optional[dict[str, typing.Any]] = None +) -> typing.Optional[str]: + """Resolve XML namespace for a property, falling back to model namespace when appropriate. + + Checks the property metadata first; if no namespace is found and the model does not declare + an explicit prefix, falls back to the model-level namespace. + + :param dict prop_meta: The property metadata dictionary. + :param dict model_meta: The model metadata dictionary, used as fallback. + :returns: The resolved namespace string, or None. + :rtype: str or None + """ + ns = _get_xml_ns(prop_meta) + if ns is None and model_meta is not None and not model_meta.get("prefix"): + ns = _get_xml_ns(model_meta) + return ns + + +def _set_xml_attribute(element: ET.Element, name: str, value: typing.Any, prop_meta: dict[str, typing.Any]) -> None: + """Set an XML attribute on an element, handling namespace prefix registration. + + :param ET.Element element: The element to set the attribute on. + :param str name: The default attribute name (wire name). + :param any value: The attribute value. + :param dict prop_meta: The property metadata dictionary. 
+ """ + xml_name = prop_meta.get("name", name) + _attr_ns = _get_xml_ns(prop_meta) + if _attr_ns: + _attr_prefix = prop_meta.get("prefix") + if _attr_prefix: + _safe_register_namespace(_attr_prefix, _attr_ns) + xml_name = "{" + _attr_ns + "}" + xml_name + element.set(xml_name, _get_primitive_type_value(value)) + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, list[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + # When serializing as an array item (parent_meta is set), check if the parent has an + # explicit itemsName. This ensures correct element names for unwrapped arrays (where + # the element tag is the property/items name, not the model type name). + _items_name = parent_meta.get("itemsName") if parent_meta is not None else None + element_name = _items_name if _items_name else (model_meta.get("name") or o.__class__.__name__) + _model_ns = _get_xml_ns(model_meta) + wrapped_element = _create_xml_element( + element_name, + model_meta.get("prefix"), + _model_ns, + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # Propagate model namespace to properties only 
for old-style "ns"-keyed models. + # DPG-generated models use the "namespace" key and explicitly declare namespace on + # each property that needs it, so propagation is intentionally skipped for them. + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + _set_xml_attribute(wrapped_element, k, v, prop_meta) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + _dict_ns = _get_xml_ns(parent_meta) if parent_meta else None + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": _dict_ns, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + _items_ns = parent_meta.get("itemsNs") + if _items_ns is None: + _items_ns = _get_xml_ns(parent_meta) + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": _items_ns, + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[dict[str, typing.Any]], +) -> ET.Element: + _meta_ns = _get_xml_ns(meta) if meta else None + wrapped_element = 
_create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, _meta_ns + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element # type: ignore[no-any-return] + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _safe_register_namespace(prefix: str, ns: str) -> None: + """Register an XML namespace prefix, handling reserved prefix patterns. + + Some prefixes (e.g. 'ns2') match Python's reserved 'ns\\d+' pattern used for + auto-generated prefixes, causing register_namespace to raise ValueError. + Falls back to directly registering in the internal namespace map. + + :param str prefix: The namespace prefix to register. + :param str ns: The namespace URI. 
+ """ + try: + ET.register_namespace(prefix, ns) + except ValueError: + _ns_map = getattr(ET, "_namespace_map", None) + if _ns_map is not None: + _ns_map[ns] = prefix + + +def _create_xml_element( + tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None +) -> ET.Element: + if prefix and ns: + _safe_register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: list[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/serialization.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/serialization.py index 6da830e0cf4a..81ec1de5922b 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/serialization.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/serialization.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/utils.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/utils.py new file mode 100644 index 000000000000..cbaa624660e4 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_utils/utils.py @@ -0,0 +1,40 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Optional + +from azure.core import MatchConditions + + +def quote_etag(etag: Optional[str]) -> Optional[str]: + if not etag or etag == "*": + return etag + if etag.startswith("W/"): + return etag + if etag.startswith('"') and etag.endswith('"'): + return etag + if etag.startswith("'") and etag.endswith("'"): + return etag + return '"' + etag + '"' + + +def prep_if_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfNotModified: + if_match = quote_etag(etag) if etag else None + return if_match + if match_condition == MatchConditions.IfPresent: + return "*" + return None + + +def prep_if_none_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfModified: + if_none_match = quote_etag(etag) if etag else None + return if_none_match + if match_condition == MatchConditions.IfMissing: + return "*" + return None diff --git 
a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_validation.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_validation.py
new file mode 100644
index 000000000000..f5af3a4eb8a2
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_validation.py
@@ -0,0 +1,66 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+
+
+def api_version_validation(**kwargs):
+    params_added_on = kwargs.pop("params_added_on", {})
+    method_added_on = kwargs.pop("method_added_on", "")
+    api_versions_list = kwargs.pop("api_versions_list", [])
+
+    def _index_with_default(value: str, default: int = -1) -> int:
+        """Get the index of value in api_versions_list, or return default if not found.
+
+        :param value: The value to search for in the api_versions_list.
+        :type value: str
+        :param default: The default value to return if the value is not found.
+        :type default: int
+        :return: The index of the value in the list, or the default value if not found.
+        :rtype: int
+        """
+        try:
+            return api_versions_list.index(value)
+        except ValueError:
+            return default
+
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            try:
+                # this assumes the client has an _api_version attribute
+                client = args[0]
+                client_api_version = client._config.api_version  # pylint: disable=protected-access
+            except AttributeError:
+                return func(*args, **kwargs)
+
+            if _index_with_default(method_added_on) > _index_with_default(client_api_version):
+                raise ValueError(
+                    f"'{func.__name__}' is not available in API version "
+                    f"{client_api_version}. 
Pass service API version {method_added_on} or newer to your client." + ) + + unsupported = { + parameter: api_version + for api_version, parameters in params_added_on.items() + for parameter in parameters + if parameter in kwargs and _index_with_default(api_version) > _index_with_default(client_api_version) + } + if unsupported: + raise ValueError( + "".join( + [ + f"'{param}' is not available in API version {client_api_version}. " + f"Use service API version {version} or newer.\n" + for param, version in unsupported.items() + ] + ) + ) + return func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_version.py index a1b29af6fef7..2eddbf3b770a 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_version.py @@ -1,8 +1,9 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "24.0.1" +VERSION = "25.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/__init__.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/__init__.py index ee2b940bd4fc..12b592bef349 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/__init__.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/__init__.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -12,7 +12,7 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._storage_management_client import StorageManagementClient # type: ignore +from ._client import StorageManagementClient # type: ignore try: from ._patch import __all__ as _patch_all diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_storage_management_client.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_client.py similarity index 89% rename from sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_storage_management_client.py rename to sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_client.py index 4775f94495fd..fdfa56210dcb 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_storage_management_client.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_client.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. 
All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -17,13 +17,14 @@ from azure.mgmt.core.policies import AsyncARMAutoResourceProviderRegistrationPolicy from azure.mgmt.core.tools import get_arm_endpoints -from .. import models as _models from .._utils.serialization import Deserializer, Serializer from ._configuration import StorageManagementClientConfiguration from .operations import ( BlobContainersOperations, BlobInventoryPoliciesOperations, BlobServicesOperations, + ConnectorsOperations, + DataSharesOperations, DeletedAccountsOperations, EncryptionScopesOperations, FileServicesOperations, @@ -55,28 +56,22 @@ class StorageManagementClient: # pylint: disable=too-many-instance-attributes """The Azure Storage Management API. 
- :ivar blob_services: BlobServicesOperations operations - :vartype blob_services: azure.mgmt.storage.aio.operations.BlobServicesOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.storage.aio.operations.Operations :ivar blob_containers: BlobContainersOperations operations :vartype blob_containers: azure.mgmt.storage.aio.operations.BlobContainersOperations - :ivar file_services: FileServicesOperations operations - :vartype file_services: azure.mgmt.storage.aio.operations.FileServicesOperations + :ivar blob_services: BlobServicesOperations operations + :vartype blob_services: azure.mgmt.storage.aio.operations.BlobServicesOperations + :ivar storage_accounts: StorageAccountsOperations operations + :vartype storage_accounts: azure.mgmt.storage.aio.operations.StorageAccountsOperations :ivar file_shares: FileSharesOperations operations :vartype file_shares: azure.mgmt.storage.aio.operations.FileSharesOperations + :ivar file_services: FileServicesOperations operations + :vartype file_services: azure.mgmt.storage.aio.operations.FileServicesOperations :ivar queue_services: QueueServicesOperations operations :vartype queue_services: azure.mgmt.storage.aio.operations.QueueServicesOperations - :ivar queue: QueueOperations operations - :vartype queue: azure.mgmt.storage.aio.operations.QueueOperations - :ivar operations: Operations operations - :vartype operations: azure.mgmt.storage.aio.operations.Operations - :ivar skus: SkusOperations operations - :vartype skus: azure.mgmt.storage.aio.operations.SkusOperations - :ivar storage_accounts: StorageAccountsOperations operations - :vartype storage_accounts: azure.mgmt.storage.aio.operations.StorageAccountsOperations :ivar deleted_accounts: DeletedAccountsOperations operations :vartype deleted_accounts: azure.mgmt.storage.aio.operations.DeletedAccountsOperations - :ivar usages: UsagesOperations operations - :vartype usages: azure.mgmt.storage.aio.operations.UsagesOperations :ivar 
management_policies: ManagementPoliciesOperations operations :vartype management_policies: azure.mgmt.storage.aio.operations.ManagementPoliciesOperations :ivar blob_inventory_policies: BlobInventoryPoliciesOperations operations @@ -85,20 +80,10 @@ class StorageManagementClient: # pylint: disable=too-many-instance-attributes :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations :vartype private_endpoint_connections: azure.mgmt.storage.aio.operations.PrivateEndpointConnectionsOperations - :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: - azure.mgmt.storage.aio.operations.PrivateLinkResourcesOperations - :ivar object_replication_policies: ObjectReplicationPoliciesOperations operations - :vartype object_replication_policies: - azure.mgmt.storage.aio.operations.ObjectReplicationPoliciesOperations - :ivar local_users: LocalUsersOperations operations - :vartype local_users: azure.mgmt.storage.aio.operations.LocalUsersOperations :ivar encryption_scopes: EncryptionScopesOperations operations :vartype encryption_scopes: azure.mgmt.storage.aio.operations.EncryptionScopesOperations :ivar table_services: TableServicesOperations operations :vartype table_services: azure.mgmt.storage.aio.operations.TableServicesOperations - :ivar table: TableOperations operations - :vartype table: azure.mgmt.storage.aio.operations.TableOperations :ivar network_security_perimeter_configurations: NetworkSecurityPerimeterConfigurationsOperations operations :vartype network_security_perimeter_configurations: @@ -106,25 +91,46 @@ class StorageManagementClient: # pylint: disable=too-many-instance-attributes :ivar storage_task_assignments: StorageTaskAssignmentsOperations operations :vartype storage_task_assignments: azure.mgmt.storage.aio.operations.StorageTaskAssignmentsOperations + :ivar connectors: ConnectorsOperations operations + :vartype connectors: azure.mgmt.storage.aio.operations.ConnectorsOperations + 
:ivar data_shares: DataSharesOperations operations + :vartype data_shares: azure.mgmt.storage.aio.operations.DataSharesOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: + azure.mgmt.storage.aio.operations.PrivateLinkResourcesOperations :ivar storage_task_assignments_instances_report: StorageTaskAssignmentsInstancesReportOperations operations :vartype storage_task_assignments_instances_report: azure.mgmt.storage.aio.operations.StorageTaskAssignmentsInstancesReportOperations + :ivar queue: QueueOperations operations + :vartype queue: azure.mgmt.storage.aio.operations.QueueOperations + :ivar object_replication_policies: ObjectReplicationPoliciesOperations operations + :vartype object_replication_policies: + azure.mgmt.storage.aio.operations.ObjectReplicationPoliciesOperations + :ivar local_users: LocalUsersOperations operations + :vartype local_users: azure.mgmt.storage.aio.operations.LocalUsersOperations + :ivar table: TableOperations operations + :vartype table: azure.mgmt.storage.aio.operations.TableOperations :ivar storage_task_assignment_instances_report: StorageTaskAssignmentInstancesReportOperations operations :vartype storage_task_assignment_instances_report: azure.mgmt.storage.aio.operations.StorageTaskAssignmentInstancesReportOperations - :param credential: Credential needed for the client to connect to Azure. Required. + :ivar skus: SkusOperations operations + :vartype skus: azure.mgmt.storage.aio.operations.SkusOperations + :ivar usages: UsagesOperations operations + :vartype usages: azure.mgmt.storage.aio.operations.UsagesOperations + :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The ID of the target subscription. Required. + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. 
:type subscription_id: str - :param base_url: Service URL. Default value is None. + :param base_url: Service host. Default value is None. :type base_url: str :keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is None. :paramtype cloud_setting: ~azure.core.AzureClouds - :keyword api_version: Api Version. Default value is "2025-06-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-08-01". + Default value is "2025-08-01". Note that overriding this default value may result in + unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. @@ -139,6 +145,7 @@ def __init__( cloud_setting: Optional["AzureClouds"] = None, **kwargs: Any ) -> None: + _endpoint = "{endpoint}" _cloud = cloud_setting or settings.current.azure_cloud # type: ignore _endpoints = get_arm_endpoints(_cloud) if not base_url: @@ -147,6 +154,7 @@ def __init__( self._config = StorageManagementClientConfiguration( credential=credential, subscription_id=subscription_id, + base_url=cast(str, base_url), cloud_setting=cloud_setting, credential_scopes=credential_scopes, **kwargs @@ -171,28 +179,24 @@ def __init__( self._config.http_logging_policy, ] self._client: AsyncARMPipelineClient = AsyncARMPipelineClient( - base_url=cast(str, base_url), policies=_policies, **kwargs + base_url=cast(str, _endpoint), policies=_policies, **kwargs ) - client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._deserialize = Deserializer(client_models) + self._serialize = Serializer() + self._deserialize = Deserializer() self._serialize.client_side_validation = False - self.blob_services = BlobServicesOperations(self._client, self._config, self._serialize, self._deserialize) - 
self.blob_containers = BlobContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.file_services = FileServicesOperations(self._client, self._config, self._serialize, self._deserialize) - self.file_shares = FileSharesOperations(self._client, self._config, self._serialize, self._deserialize) - self.queue_services = QueueServicesOperations(self._client, self._config, self._serialize, self._deserialize) - self.queue = QueueOperations(self._client, self._config, self._serialize, self._deserialize) self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) - self.skus = SkusOperations(self._client, self._config, self._serialize, self._deserialize) + self.blob_containers = BlobContainersOperations(self._client, self._config, self._serialize, self._deserialize) + self.blob_services = BlobServicesOperations(self._client, self._config, self._serialize, self._deserialize) self.storage_accounts = StorageAccountsOperations( self._client, self._config, self._serialize, self._deserialize ) + self.file_shares = FileSharesOperations(self._client, self._config, self._serialize, self._deserialize) + self.file_services = FileServicesOperations(self._client, self._config, self._serialize, self._deserialize) + self.queue_services = QueueServicesOperations(self._client, self._config, self._serialize, self._deserialize) self.deleted_accounts = DeletedAccountsOperations( self._client, self._config, self._serialize, self._deserialize ) - self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) self.management_policies = ManagementPoliciesOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -202,32 +206,37 @@ def __init__( self.private_endpoint_connections = PrivateEndpointConnectionsOperations( self._client, self._config, self._serialize, self._deserialize ) - self.private_link_resources = PrivateLinkResourcesOperations( - self._client, self._config, 
self._serialize, self._deserialize - ) - self.object_replication_policies = ObjectReplicationPoliciesOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.local_users = LocalUsersOperations(self._client, self._config, self._serialize, self._deserialize) self.encryption_scopes = EncryptionScopesOperations( self._client, self._config, self._serialize, self._deserialize ) self.table_services = TableServicesOperations(self._client, self._config, self._serialize, self._deserialize) - self.table = TableOperations(self._client, self._config, self._serialize, self._deserialize) self.network_security_perimeter_configurations = NetworkSecurityPerimeterConfigurationsOperations( self._client, self._config, self._serialize, self._deserialize ) self.storage_task_assignments = StorageTaskAssignmentsOperations( self._client, self._config, self._serialize, self._deserialize ) + self.connectors = ConnectorsOperations(self._client, self._config, self._serialize, self._deserialize) + self.data_shares = DataSharesOperations(self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.storage_task_assignments_instances_report = StorageTaskAssignmentsInstancesReportOperations( self._client, self._config, self._serialize, self._deserialize ) + self.queue = QueueOperations(self._client, self._config, self._serialize, self._deserialize) + self.object_replication_policies = ObjectReplicationPoliciesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.local_users = LocalUsersOperations(self._client, self._config, self._serialize, self._deserialize) + self.table = TableOperations(self._client, self._config, self._serialize, self._deserialize) self.storage_task_assignment_instances_report = StorageTaskAssignmentInstancesReportOperations( self._client, self._config, self._serialize, 
self._deserialize ) + self.skus = SkusOperations(self._client, self._config, self._serialize, self._deserialize) + self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) - def _send_request( + def send_request( self, request: HttpRequest, *, stream: bool = False, **kwargs: Any ) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. @@ -235,7 +244,7 @@ def _send_request( >>> from azure.core.rest import HttpRequest >>> request = HttpRequest("GET", "https://www.example.org/") - >>> response = await client._send_request(request) + >>> response = await client.send_request(request) For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request @@ -248,7 +257,11 @@ def _send_request( """ request_copy = deepcopy(request) - request_copy.url = self._client.format_url(request_copy.url) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore async def close(self) -> None: diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_configuration.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_configuration.py index a55453bc9234..fc1d559a5552 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_configuration.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_configuration.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -24,15 +24,18 @@ class StorageManagementClientConfiguration: # pylint: disable=too-many-instance Note that all parameters used to create this instance are saved as instance attributes. - :param credential: Credential needed for the client to connect to Azure. Required. + :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The ID of the target subscription. Required. + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. :type subscription_id: str + :param base_url: Service host. Default value is "https://management.azure.com". + :type base_url: str :param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is None. :type cloud_setting: ~azure.core.AzureClouds - :keyword api_version: Api Version. Default value is "2025-06-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-08-01". + Default value is "2025-08-01". Note that overriding this default value may result in + unsupported behavior. 
:paramtype api_version: str """ @@ -40,10 +43,11 @@ def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, + base_url: str = "https://management.azure.com", cloud_setting: Optional["AzureClouds"] = None, **kwargs: Any ) -> None: - api_version: str = kwargs.pop("api_version", "2025-06-01") + api_version: str = kwargs.pop("api_version", "2025-08-01") if credential is None: raise ValueError("Parameter 'credential' must not be None.") @@ -52,6 +56,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id + self.base_url = base_url self.cloud_setting = cloud_setting self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_patch.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_patch.py index 8bcb627aa475..87676c65a8f0 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_patch.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_patch.py @@ -7,9 +7,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/__init__.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/__init__.py index 0d649856038b..a3f91fe60a7f 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/__init__.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/__init__.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft 
Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -13,60 +12,64 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._blob_services_operations import BlobServicesOperations # type: ignore -from ._blob_containers_operations import BlobContainersOperations # type: ignore -from ._file_services_operations import FileServicesOperations # type: ignore -from ._file_shares_operations import FileSharesOperations # type: ignore -from ._queue_services_operations import QueueServicesOperations # type: ignore -from ._queue_operations import QueueOperations # type: ignore from ._operations import Operations # type: ignore -from ._skus_operations import SkusOperations # type: ignore -from ._storage_accounts_operations import StorageAccountsOperations # type: ignore -from ._deleted_accounts_operations import DeletedAccountsOperations # type: ignore -from ._usages_operations import UsagesOperations # type: ignore -from ._management_policies_operations import ManagementPoliciesOperations # type: ignore -from ._blob_inventory_policies_operations import BlobInventoryPoliciesOperations # type: ignore -from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations # type: ignore -from ._private_link_resources_operations import PrivateLinkResourcesOperations # type: ignore -from ._object_replication_policies_operations import ObjectReplicationPoliciesOperations # type: ignore -from ._local_users_operations import LocalUsersOperations # type: ignore -from ._encryption_scopes_operations import EncryptionScopesOperations # type: 
ignore -from ._table_services_operations import TableServicesOperations # type: ignore -from ._table_operations import TableOperations # type: ignore -from ._network_security_perimeter_configurations_operations import NetworkSecurityPerimeterConfigurationsOperations # type: ignore -from ._storage_task_assignments_operations import StorageTaskAssignmentsOperations # type: ignore -from ._storage_task_assignments_instances_report_operations import StorageTaskAssignmentsInstancesReportOperations # type: ignore -from ._storage_task_assignment_instances_report_operations import StorageTaskAssignmentInstancesReportOperations # type: ignore +from ._operations import BlobContainersOperations # type: ignore +from ._operations import BlobServicesOperations # type: ignore +from ._operations import StorageAccountsOperations # type: ignore +from ._operations import FileSharesOperations # type: ignore +from ._operations import FileServicesOperations # type: ignore +from ._operations import QueueServicesOperations # type: ignore +from ._operations import DeletedAccountsOperations # type: ignore +from ._operations import ManagementPoliciesOperations # type: ignore +from ._operations import BlobInventoryPoliciesOperations # type: ignore +from ._operations import PrivateEndpointConnectionsOperations # type: ignore +from ._operations import EncryptionScopesOperations # type: ignore +from ._operations import TableServicesOperations # type: ignore +from ._operations import NetworkSecurityPerimeterConfigurationsOperations # type: ignore +from ._operations import StorageTaskAssignmentsOperations # type: ignore +from ._operations import ConnectorsOperations # type: ignore +from ._operations import DataSharesOperations # type: ignore +from ._operations import PrivateLinkResourcesOperations # type: ignore +from ._operations import StorageTaskAssignmentsInstancesReportOperations # type: ignore +from ._operations import QueueOperations # type: ignore +from ._operations import 
ObjectReplicationPoliciesOperations # type: ignore +from ._operations import LocalUsersOperations # type: ignore +from ._operations import TableOperations # type: ignore +from ._operations import StorageTaskAssignmentInstancesReportOperations # type: ignore +from ._operations import SkusOperations # type: ignore +from ._operations import UsagesOperations # type: ignore from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ - "BlobServicesOperations", + "Operations", "BlobContainersOperations", - "FileServicesOperations", + "BlobServicesOperations", + "StorageAccountsOperations", "FileSharesOperations", + "FileServicesOperations", "QueueServicesOperations", - "QueueOperations", - "Operations", - "SkusOperations", - "StorageAccountsOperations", "DeletedAccountsOperations", - "UsagesOperations", "ManagementPoliciesOperations", "BlobInventoryPoliciesOperations", "PrivateEndpointConnectionsOperations", - "PrivateLinkResourcesOperations", - "ObjectReplicationPoliciesOperations", - "LocalUsersOperations", "EncryptionScopesOperations", "TableServicesOperations", - "TableOperations", "NetworkSecurityPerimeterConfigurationsOperations", "StorageTaskAssignmentsOperations", + "ConnectorsOperations", + "DataSharesOperations", + "PrivateLinkResourcesOperations", "StorageTaskAssignmentsInstancesReportOperations", + "QueueOperations", + "ObjectReplicationPoliciesOperations", + "LocalUsersOperations", + "TableOperations", "StorageTaskAssignmentInstancesReportOperations", + "SkusOperations", + "UsagesOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_blob_containers_operations.py deleted file mode 100644 index d18524f52711..000000000000 --- 
a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_blob_containers_operations.py +++ /dev/null @@ -1,1828 +0,0 @@ -# pylint: disable=too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, AsyncIterator, Callable, IO, Literal, Optional, TypeVar, Union, cast, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... 
import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._blob_containers_operations import ( - build_clear_legal_hold_request, - build_create_or_update_immutability_policy_request, - build_create_request, - build_delete_immutability_policy_request, - build_delete_request, - build_extend_immutability_policy_request, - build_get_immutability_policy_request, - build_get_request, - build_lease_request, - build_list_request, - build_lock_immutability_policy_request, - build_object_level_worm_request, - build_set_legal_hold_request, - build_update_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class BlobContainersOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`blob_containers` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - maxpagesize: Optional[str] = None, - filter: Optional[str] = None, - include: Optional[Union[str, _models.ListContainersInclude]] = None, - **kwargs: Any - ) -> AsyncItemPaged["_models.ListContainerItem"]: - """Lists all containers and does not support a prefix like data plane. 
Also SRP today does not - return continuation token. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional. Specified maximum number of containers that can be included in - the list. Default value is None. - :type maxpagesize: str - :param filter: Optional. When specified, only container names starting with the filter will be - listed. Default value is None. - :type filter: str - :param include: Optional, used to include the properties for soft deleted blob containers. - "deleted" Default value is None. - :type include: str or ~azure.mgmt.storage.models.ListContainersInclude - :return: An iterator like instance of either ListContainerItem or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.ListContainerItem] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListContainerItems] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - 
include=include, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("ListContainerItems", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - @overload - async def create( - self, - resource_group_name: str, - account_name: str, - container_name: str, - blob_container: _models.BlobContainer, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobContainer: - """Creates a new container under the specified account as described by request body. 
The container - resource includes metadata and properties for that container. It does not include a list of the - blobs contained by the container. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param blob_container: Properties of the blob container to create. Required. - :type blob_container: ~azure.mgmt.storage.models.BlobContainer - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def create( - self, - resource_group_name: str, - account_name: str, - container_name: str, - blob_container: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobContainer: - """Creates a new container under the specified account as described by request body. The container - resource includes metadata and properties for that container. It does not include a list of the - blobs contained by the container. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param blob_container: Properties of the blob container to create. Required. - :type blob_container: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def create( - self, - resource_group_name: str, - account_name: str, - container_name: str, - blob_container: Union[_models.BlobContainer, IO[bytes]], - **kwargs: Any - ) -> _models.BlobContainer: - """Creates a new container under the specified account as described by request body. The container - resource includes metadata and properties for that container. It does not include a list of the - blobs contained by the container. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param blob_container: Properties of the blob container to create. Is either a BlobContainer - type or a IO[bytes] type. Required. - :type blob_container: ~azure.mgmt.storage.models.BlobContainer or IO[bytes] - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BlobContainer] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(blob_container, (IOBase, bytes)): - _content = blob_container - else: - _json = self._serialize.body(blob_container, "BlobContainer") - - _request = build_create_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False 
- pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobContainer", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def update( - self, - resource_group_name: str, - account_name: str, - container_name: str, - blob_container: _models.BlobContainer, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobContainer: - """Updates container properties as specified in request body. Properties not mentioned in the - request will be unchanged. Update fails if the specified container doesn't already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param blob_container: Properties to update for the blob container. Required. 
- :type blob_container: ~azure.mgmt.storage.models.BlobContainer - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def update( - self, - resource_group_name: str, - account_name: str, - container_name: str, - blob_container: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobContainer: - """Updates container properties as specified in request body. Properties not mentioned in the - request will be unchanged. Update fails if the specified container doesn't already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param blob_container: Properties to update for the blob container. Required. - :type blob_container: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def update( - self, - resource_group_name: str, - account_name: str, - container_name: str, - blob_container: Union[_models.BlobContainer, IO[bytes]], - **kwargs: Any - ) -> _models.BlobContainer: - """Updates container properties as specified in request body. Properties not mentioned in the - request will be unchanged. Update fails if the specified container doesn't already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param blob_container: Properties to update for the blob container. Is either a BlobContainer - type or a IO[bytes] type. Required. 
- :type blob_container: ~azure.mgmt.storage.models.BlobContainer or IO[bytes] - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BlobContainer] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(blob_container, (IOBase, bytes)): - _content = blob_container - else: - _json = self._serialize.body(blob_container, "BlobContainer") - - _request = build_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobContainer", pipeline_response.http_response) - - if cls: - return 
cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get( - self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any - ) -> _models.BlobContainer: - """Gets properties of a specified container. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. 
- :type container_name: str - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.BlobContainer] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobContainer", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def delete(self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any) -> None: - """Deletes specified container under its account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return 
cls(pipeline_response, None, {}) # type: ignore - - @overload - async def set_legal_hold( - self, - resource_group_name: str, - account_name: str, - container_name: str, - legal_hold: _models.LegalHold, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LegalHold: - """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold - follows an append pattern and does not clear out the existing tags that are not specified in - the request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param legal_hold: The LegalHold property that will be set to a blob container. Required. - :type legal_hold: ~azure.mgmt.storage.models.LegalHold - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: LegalHold or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LegalHold - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def set_legal_hold( - self, - resource_group_name: str, - account_name: str, - container_name: str, - legal_hold: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LegalHold: - """Sets legal hold tags. 
Setting the same tag results in an idempotent operation. SetLegalHold - follows an append pattern and does not clear out the existing tags that are not specified in - the request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param legal_hold: The LegalHold property that will be set to a blob container. Required. - :type legal_hold: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: LegalHold or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LegalHold - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def set_legal_hold( - self, - resource_group_name: str, - account_name: str, - container_name: str, - legal_hold: Union[_models.LegalHold, IO[bytes]], - **kwargs: Any - ) -> _models.LegalHold: - """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold - follows an append pattern and does not clear out the existing tags that are not specified in - the request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param legal_hold: The LegalHold property that will be set to a blob container. Is either a - LegalHold type or a IO[bytes] type. Required. - :type legal_hold: ~azure.mgmt.storage.models.LegalHold or IO[bytes] - :return: LegalHold or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LegalHold - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.LegalHold] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(legal_hold, (IOBase, bytes)): - _content = legal_hold - else: - _json = self._serialize.body(legal_hold, "LegalHold") - - _request = build_set_legal_hold_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - 
subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LegalHold", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def clear_legal_hold( - self, - resource_group_name: str, - account_name: str, - container_name: str, - legal_hold: _models.LegalHold, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LegalHold: - """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent - operation. ClearLegalHold clears out only the specified tags in the request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. 
Required. - :type container_name: str - :param legal_hold: The LegalHold property that will be clear from a blob container. Required. - :type legal_hold: ~azure.mgmt.storage.models.LegalHold - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: LegalHold or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LegalHold - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def clear_legal_hold( - self, - resource_group_name: str, - account_name: str, - container_name: str, - legal_hold: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LegalHold: - """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent - operation. ClearLegalHold clears out only the specified tags in the request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param legal_hold: The LegalHold property that will be clear from a blob container. Required. - :type legal_hold: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: LegalHold or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LegalHold - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def clear_legal_hold( - self, - resource_group_name: str, - account_name: str, - container_name: str, - legal_hold: Union[_models.LegalHold, IO[bytes]], - **kwargs: Any - ) -> _models.LegalHold: - """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent - operation. ClearLegalHold clears out only the specified tags in the request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param legal_hold: The LegalHold property that will be clear from a blob container. Is either a - LegalHold type or a IO[bytes] type. Required. 
- :type legal_hold: ~azure.mgmt.storage.models.LegalHold or IO[bytes] - :return: LegalHold or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LegalHold - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.LegalHold] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(legal_hold, (IOBase, bytes)): - _content = legal_hold - else: - _json = self._serialize.body(legal_hold, "LegalHold") - - _request = build_clear_legal_hold_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LegalHold", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, 
deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def create_or_update_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: Optional[str] = None, - parameters: Optional[_models.ImmutabilityPolicy] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but - not required for this operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Default value is None. - :type if_match: str - :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob - container. Default value is None. - :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def create_or_update_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: Optional[str] = None, - parameters: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but - not required for this operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Default value is None. - :type if_match: str - :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob - container. Default value is None. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
- Default value is "application/json". - :paramtype content_type: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def create_or_update_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: Optional[str] = None, - parameters: Optional[Union[_models.ImmutabilityPolicy, IO[bytes]]] = None, - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but - not required for this operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Default value is None. - :type if_match: str - :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob - container. Is either a ImmutabilityPolicy type or a IO[bytes] type. Default value is None. 
- :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy or IO[bytes] - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - immutability_policy_name: Literal["default"] = kwargs.pop("immutability_policy_name", "default") - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - content_type = content_type if parameters else None - cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if parameters else None - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - if parameters is not None: - _json = self._serialize.body(parameters, "ImmutabilityPolicy") - else: - _json = None - - _request = build_create_or_update_immutability_policy_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - if_match=if_match, - immutability_policy_name=immutability_policy_name, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = 
pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: Optional[str] = None, - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Gets the existing immutability policy along with the corresponding ETag in response headers and - body. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Default value is None. 
- :type if_match: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - immutability_policy_name: Literal["default"] = kwargs.pop("immutability_policy_name", "default") - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) - - _request = build_get_immutability_policy_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - if_match=if_match, - immutability_policy_name=immutability_policy_name, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def 
delete_immutability_policy( - self, resource_group_name: str, account_name: str, container_name: str, if_match: str, **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Aborts an unlocked immutability policy. The response of delete has - immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this - operation. Deleting a locked immutability policy is not allowed, the only way is to delete the - container after deleting all expired blobs inside the policy locked container. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Required. 
- :type if_match: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - immutability_policy_name: Literal["default"] = kwargs.pop("immutability_policy_name", "default") - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) - - _request = build_delete_immutability_policy_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - if_match=if_match, - immutability_policy_name=immutability_policy_name, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def 
lock_immutability_policy( - self, resource_group_name: str, account_name: str, container_name: str, if_match: str, **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is - ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Required. 
- :type if_match: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) - - _request = build_lock_immutability_policy_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - if_match=if_match, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @overload - async def extend_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: str, - parameters: 
Optional[_models.ImmutabilityPolicy] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only - action allowed on a Locked policy will be this action. ETag in If-Match is required for this - operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Required. - :type if_match: str - :param parameters: The ImmutabilityPolicy Properties that will be extended for a blob - container. Default value is None. - :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def extend_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: str, - parameters: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only - action allowed on a Locked policy will be this action. ETag in If-Match is required for this - operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Required. - :type if_match: str - :param parameters: The ImmutabilityPolicy Properties that will be extended for a blob - container. Default value is None. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
- Default value is "application/json". - :paramtype content_type: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def extend_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: str, - parameters: Optional[Union[_models.ImmutabilityPolicy, IO[bytes]]] = None, - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only - action allowed on a Locked policy will be this action. ETag in If-Match is required for this - operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Required. - :type if_match: str - :param parameters: The ImmutabilityPolicy Properties that will be extended for a blob - container. Is either a ImmutabilityPolicy type or a IO[bytes] type. Default value is None. 
- :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy or IO[bytes] - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - content_type = content_type if parameters else None - cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if parameters else None - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - if parameters is not None: - _json = self._serialize.body(parameters, "ImmutabilityPolicy") - else: - _json = None - - _request = build_extend_immutability_policy_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - if_match=if_match, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - 
raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @overload - async def lease( - self, - resource_group_name: str, - account_name: str, - container_name: str, - parameters: Optional[_models.LeaseContainerRequest] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LeaseContainerResponse: - """The Lease Container operation establishes and manages a lock on a container for delete - operations. The lock duration can be 15 to 60 seconds, or can be infinite. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param parameters: Lease Container request body. Default value is None. - :type parameters: ~azure.mgmt.storage.models.LeaseContainerRequest - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: LeaseContainerResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def lease( - self, - resource_group_name: str, - account_name: str, - container_name: str, - parameters: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LeaseContainerResponse: - """The Lease Container operation establishes and manages a lock on a container for delete - operations. The lock duration can be 15 to 60 seconds, or can be infinite. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param parameters: Lease Container request body. Default value is None. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: LeaseContainerResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def lease( - self, - resource_group_name: str, - account_name: str, - container_name: str, - parameters: Optional[Union[_models.LeaseContainerRequest, IO[bytes]]] = None, - **kwargs: Any - ) -> _models.LeaseContainerResponse: - """The Lease Container operation establishes and manages a lock on a container for delete - operations. The lock duration can be 15 to 60 seconds, or can be infinite. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param parameters: Lease Container request body. Is either a LeaseContainerRequest type or a - IO[bytes] type. Default value is None. 
- :type parameters: ~azure.mgmt.storage.models.LeaseContainerRequest or IO[bytes] - :return: LeaseContainerResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - content_type = content_type if parameters else None - cls: ClsType[_models.LeaseContainerResponse] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if parameters else None - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - if parameters is not None: - _json = self._serialize.body(parameters, "LeaseContainerRequest") - else: - _json = None - - _request = build_lease_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LeaseContainerResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - async def _object_level_worm_initial( - self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any - ) -> AsyncIterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - _request = build_object_level_worm_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, 
deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def begin_object_level_worm( - self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any - ) -> AsyncLROPoller[None]: - """This operation migrates a blob container from container level WORM to object level immutability - enabled container. Prerequisites require a container level immutability policy either in locked - or unlocked state, Account level versioning must be enabled and there should be no Legal hold - on the container. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. 
- :type container_name: str - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._object_level_worm_initial( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git 
a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_blob_inventory_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_blob_inventory_policies_operations.py deleted file mode 100644 index 9641d714b10c..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_blob_inventory_policies_operations.py +++ /dev/null @@ -1,430 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._blob_inventory_policies_operations import ( - build_create_or_update_request, - build_delete_request, - build_get_request, - build_list_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class BlobInventoryPoliciesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`blob_inventory_policies` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def get( - self, - resource_group_name: str, - account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - **kwargs: Any - ) -> _models.BlobInventoryPolicy: - """Gets the blob inventory policy associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It - should always be 'default'. "default" Required. - :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName - :return: BlobInventoryPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.BlobInventoryPolicy] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - blob_inventory_policy_name=blob_inventory_policy_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobInventoryPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def create_or_update( - self, - resource_group_name: str, - 
account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - properties: _models.BlobInventoryPolicy, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobInventoryPolicy: - """Sets the blob inventory policy to the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It - should always be 'default'. "default" Required. - :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName - :param properties: The blob inventory policy set to a storage account. Required. - :type properties: ~azure.mgmt.storage.models.BlobInventoryPolicy - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: BlobInventoryPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def create_or_update( - self, - resource_group_name: str, - account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - properties: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobInventoryPolicy: - """Sets the blob inventory policy to the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It - should always be 'default'. "default" Required. - :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName - :param properties: The blob inventory policy set to a storage account. Required. - :type properties: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: BlobInventoryPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def create_or_update( - self, - resource_group_name: str, - account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - properties: Union[_models.BlobInventoryPolicy, IO[bytes]], - **kwargs: Any - ) -> _models.BlobInventoryPolicy: - """Sets the blob inventory policy to the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It - should always be 'default'. "default" Required. 
- :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName - :param properties: The blob inventory policy set to a storage account. Is either a - BlobInventoryPolicy type or a IO[bytes] type. Required. - :type properties: ~azure.mgmt.storage.models.BlobInventoryPolicy or IO[bytes] - :return: BlobInventoryPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BlobInventoryPolicy] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties - else: - _json = self._serialize.body(properties, "BlobInventoryPolicy") - - _request = build_create_or_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - blob_inventory_policy_name=blob_inventory_policy_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code 
not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobInventoryPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def delete( - self, - resource_group_name: str, - account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - **kwargs: Any - ) -> None: - """Deletes the blob inventory policy associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It - should always be 'default'. "default" Required. 
- :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - blob_inventory_policy_name=blob_inventory_policy_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @distributed_trace - def list( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> AsyncItemPaged["_models.BlobInventoryPolicy"]: - """Gets the blob inventory policy associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: An iterator like instance of either BlobInventoryPolicy or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.BlobInventoryPolicy] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListBlobInventoryPolicy] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def 
extract_data(pipeline_response): - deserialized = self._deserialize("ListBlobInventoryPolicy", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_blob_services_operations.py deleted file mode 100644 index c930ba94f9b1..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_blob_services_operations.py +++ /dev/null @@ -1,354 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Literal, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._blob_services_operations import ( - build_get_service_properties_request, - build_list_request, - build_set_service_properties_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class BlobServicesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`blob_services` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> AsyncItemPaged["_models.BlobServiceProperties"]: - """List blob services of storage account. It returns a collection of one object named default. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: An iterator like instance of either BlobServiceProperties or the result of - cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.BlobServiceProperties] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.BlobServiceItems] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("BlobServiceItems", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, AsyncList(list_of_elem) - - async def 
get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - @overload - async def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: _models.BlobServiceProperties, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobServiceProperties: - """Sets the properties of a storage account’s Blob service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Blob service, including properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Required. - :type parameters: ~azure.mgmt.storage.models.BlobServiceProperties - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: BlobServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobServiceProperties: - """Sets the properties of a storage account’s Blob service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Blob service, including properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: BlobServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.BlobServiceProperties, IO[bytes]], - **kwargs: Any - ) -> _models.BlobServiceProperties: - """Sets the properties of a storage account’s Blob service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Blob service, including properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Is either a - BlobServiceProperties type or a IO[bytes] type. Required. - :type parameters: ~azure.mgmt.storage.models.BlobServiceProperties or IO[bytes] - :return: BlobServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - blob_services_name: Literal["default"] = kwargs.pop("blob_services_name", "default") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BlobServiceProperties] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "BlobServiceProperties") - - _request = build_set_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - 
subscription_id=self._config.subscription_id, - api_version=api_version, - blob_services_name=blob_services_name, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_service_properties( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> _models.BlobServiceProperties: - """Gets the properties of a storage account’s Blob service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: BlobServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - blob_services_name: Literal["default"] = kwargs.pop("blob_services_name", "default") - cls: ClsType[_models.BlobServiceProperties] = kwargs.pop("cls", None) - - _request = build_get_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - blob_services_name=blob_services_name, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_deleted_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_deleted_accounts_operations.py deleted file mode 100644 
index 2a2d9a5ea91a..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_deleted_accounts_operations.py +++ /dev/null @@ -1,192 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._deleted_accounts_operations import build_get_request, build_list_request -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class DeletedAccountsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. 
- - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`deleted_accounts` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list(self, **kwargs: Any) -> AsyncItemPaged["_models.DeletedAccount"]: - """Lists deleted accounts under the subscription. - - :return: An iterator like instance of either DeletedAccount or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.DeletedAccount] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.DeletedAccountListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = 
case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("DeletedAccountListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - @distributed_trace_async - async def get(self, deleted_account_name: str, location: str, **kwargs: Any) -> _models.DeletedAccount: - """Get properties of specified deleted account resource. - - :param deleted_account_name: Name of the deleted storage account. Required. - :type deleted_account_name: str - :param location: The location of the deleted storage account. Required. 
- :type location: str - :return: DeletedAccount or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.DeletedAccount - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.DeletedAccount] = kwargs.pop("cls", None) - - _request = build_get_request( - deleted_account_name=deleted_account_name, - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("DeletedAccount", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_encryption_scopes_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_encryption_scopes_operations.py deleted file mode 100644 index 98de3aef61a0..000000000000 --- 
a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_encryption_scopes_operations.py +++ /dev/null @@ -1,563 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._encryption_scopes_operations import ( - build_get_request, - build_list_request, - build_patch_request, - build_put_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class EncryptionScopesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. 
- - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`encryption_scopes` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - async def put( - self, - resource_group_name: str, - account_name: str, - encryption_scope_name: str, - encryption_scope: _models.EncryptionScope, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.EncryptionScope: - """Synchronously creates or updates an encryption scope under the specified storage account. If an - encryption scope is already created and a subsequent request is issued with different - properties, the encryption scope properties will be updated per the specified request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. 
- :type encryption_scope_name: str - :param encryption_scope: Encryption scope properties to be used for the create or update. - Required. - :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def put( - self, - resource_group_name: str, - account_name: str, - encryption_scope_name: str, - encryption_scope: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.EncryptionScope: - """Synchronously creates or updates an encryption scope under the specified storage account. If an - encryption scope is already created and a subsequent request is issued with different - properties, the encryption scope properties will be updated per the specified request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. - :type encryption_scope_name: str - :param encryption_scope: Encryption scope properties to be used for the create or update. - Required. 
- :type encryption_scope: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def put( - self, - resource_group_name: str, - account_name: str, - encryption_scope_name: str, - encryption_scope: Union[_models.EncryptionScope, IO[bytes]], - **kwargs: Any - ) -> _models.EncryptionScope: - """Synchronously creates or updates an encryption scope under the specified storage account. If an - encryption scope is already created and a subsequent request is issued with different - properties, the encryption scope properties will be updated per the specified request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. - :type encryption_scope_name: str - :param encryption_scope: Encryption scope properties to be used for the create or update. Is - either a EncryptionScope type or a IO[bytes] type. Required. 
- :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope or IO[bytes] - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EncryptionScope] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(encryption_scope, (IOBase, bytes)): - _content = encryption_scope - else: - _json = self._serialize.body(encryption_scope, "EncryptionScope") - - _request = build_put_request( - resource_group_name=resource_group_name, - account_name=account_name, - encryption_scope_name=encryption_scope_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("EncryptionScope", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def patch( - self, - resource_group_name: str, - account_name: str, - encryption_scope_name: str, - encryption_scope: _models.EncryptionScope, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.EncryptionScope: - """Update encryption scope properties as specified in the request body. Update fails if the - specified encryption scope does not already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. - :type encryption_scope_name: str - :param encryption_scope: Encryption scope properties to be used for the update. Required. - :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def patch( - self, - resource_group_name: str, - account_name: str, - encryption_scope_name: str, - encryption_scope: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.EncryptionScope: - """Update encryption scope properties as specified in the request body. Update fails if the - specified encryption scope does not already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. - :type encryption_scope_name: str - :param encryption_scope: Encryption scope properties to be used for the update. Required. - :type encryption_scope: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def patch( - self, - resource_group_name: str, - account_name: str, - encryption_scope_name: str, - encryption_scope: Union[_models.EncryptionScope, IO[bytes]], - **kwargs: Any - ) -> _models.EncryptionScope: - """Update encryption scope properties as specified in the request body. Update fails if the - specified encryption scope does not already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. - :type encryption_scope_name: str - :param encryption_scope: Encryption scope properties to be used for the update. Is either a - EncryptionScope type or a IO[bytes] type. Required. 
- :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope or IO[bytes] - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EncryptionScope] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(encryption_scope, (IOBase, bytes)): - _content = encryption_scope - else: - _json = self._serialize.body(encryption_scope, "EncryptionScope") - - _request = build_patch_request( - resource_group_name=resource_group_name, - account_name=account_name, - encryption_scope_name=encryption_scope_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("EncryptionScope", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get( - self, resource_group_name: str, account_name: str, encryption_scope_name: str, **kwargs: Any - ) -> _models.EncryptionScope: - """Returns the properties for the specified encryption scope. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. 
- :type encryption_scope_name: str - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.EncryptionScope] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - encryption_scope_name=encryption_scope_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("EncryptionScope", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - maxpagesize: Optional[int] = None, - filter: Optional[str] = None, - include: Optional[Union[str, _models.ListEncryptionScopesInclude]] = 
None, - **kwargs: Any - ) -> AsyncItemPaged["_models.EncryptionScope"]: - """Lists all the encryption scopes available under the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional, specifies the maximum number of encryption scopes that will be - included in the list response. Default value is None. - :type maxpagesize: int - :param filter: Optional. When specified, only encryption scope names starting with the filter - will be listed. Default value is None. - :type filter: str - :param include: Optional, when specified, will list encryption scopes with the specific state. - Defaults to All. Known values are: "All", "Enabled", and "Disabled". Default value is None. 
- :type include: str or ~azure.mgmt.storage.models.ListEncryptionScopesInclude - :return: An iterator like instance of either EncryptionScope or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.EncryptionScope] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.EncryptionScopeListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - include=include, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("EncryptionScopeListResult", pipeline_response) - list_of_elem = deserialized.value - if 
cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_file_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_file_services_operations.py deleted file mode 100644 index dd6eddf52361..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_file_services_operations.py +++ /dev/null @@ -1,481 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Literal, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._file_services_operations import ( - build_get_service_properties_request, - build_get_service_usage_request, - build_list_request, - build_list_service_usages_request, - build_set_service_properties_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class FileServicesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`file_services` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.FileServiceItems: - """List all file services in storage accounts. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: FileServiceItems or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileServiceItems - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.FileServiceItems] = kwargs.pop("cls", None) - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileServiceItems", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: _models.FileServiceProperties, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.FileServiceProperties: - """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource - Sharing) rules. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of file services in storage accounts, including CORS - (Cross-Origin Resource Sharing) rules. Required. - :type parameters: ~azure.mgmt.storage.models.FileServiceProperties - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: FileServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.FileServiceProperties: - """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource - Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of file services in storage accounts, including CORS - (Cross-Origin Resource Sharing) rules. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
- Default value is "application/json". - :paramtype content_type: str - :return: FileServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.FileServiceProperties, IO[bytes]], - **kwargs: Any - ) -> _models.FileServiceProperties: - """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource - Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of file services in storage accounts, including CORS - (Cross-Origin Resource Sharing) rules. Is either a FileServiceProperties type or a IO[bytes] - type. Required. 
- :type parameters: ~azure.mgmt.storage.models.FileServiceProperties or IO[bytes] - :return: FileServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - file_services_name: Literal["default"] = kwargs.pop("file_services_name", "default") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.FileServiceProperties] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "FileServiceProperties") - - _request = build_set_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - file_services_name=file_services_name, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_service_properties( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> _models.FileServiceProperties: - """Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource - Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: FileServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - file_services_name: Literal["default"] = kwargs.pop("file_services_name", "default") - cls: ClsType[_models.FileServiceProperties] = kwargs.pop("cls", None) - - _request = build_get_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - file_services_name=file_services_name, - headers=_headers, - params=_params, - ) - 
_request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def list_service_usages( - self, resource_group_name: str, account_name: str, maxpagesize: Optional[int] = None, **kwargs: Any - ) -> AsyncItemPaged["_models.FileServiceUsage"]: - """Gets the usages of file service in storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional, specifies the maximum number of file service usages to be - included in the list response. Default value is None. 
- :type maxpagesize: int - :return: An iterator like instance of either FileServiceUsage or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.FileServiceUsage] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - file_services_name: Literal["default"] = kwargs.pop("file_services_name", "default") - cls: ClsType[_models.FileServiceUsages] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_service_usages_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - api_version=api_version, - file_services_name=file_services_name, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("FileServiceUsages", pipeline_response) - 
list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - @distributed_trace_async - async def get_service_usage( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> _models.FileServiceUsage: - """Gets the usage of file service in storage account including account limits, file share limits - and constants used in recommendations and bursting formula. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: FileServiceUsage or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileServiceUsage - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - file_services_name: Literal["default"] = kwargs.pop("file_services_name", "default") - file_service_usages_name: Literal["default"] = kwargs.pop("file_service_usages_name", "default") - cls: ClsType[_models.FileServiceUsage] = kwargs.pop("cls", None) - - _request = build_get_service_usage_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - file_services_name=file_services_name, - file_service_usages_name=file_service_usages_name, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileServiceUsage", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_file_shares_operations.py 
b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_file_shares_operations.py deleted file mode 100644 index fee5ef0f3320..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_file_shares_operations.py +++ /dev/null @@ -1,987 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._file_shares_operations import ( - build_create_request, - build_delete_request, - build_get_request, - build_lease_request, - build_list_request, - build_restore_request, - build_update_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class FileSharesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`file_shares` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - maxpagesize: Optional[str] = None, - filter: Optional[str] = None, - expand: Optional[str] = None, - **kwargs: Any - ) -> AsyncItemPaged["_models.FileShareItem"]: - """Lists all shares. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param maxpagesize: Optional. Specified maximum number of shares that can be included in the - list. Default value is None. - :type maxpagesize: str - :param filter: Optional. When specified, only share names starting with the filter will be - listed. Default value is None. - :type filter: str - :param expand: Optional, used to expand the properties within share's properties. Valid values - are: deleted, snapshots. Should be passed as a string with delimiter ','. Default value is - None. - :type expand: str - :return: An iterator like instance of either FileShareItem or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.FileShareItem] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.FileShareItems] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - expand=expand, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - 
_next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("FileShareItems", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - @overload - async def create( - self, - resource_group_name: str, - account_name: str, - share_name: str, - file_share: _models.FileShare, - expand: Optional[str] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.FileShare: - """Creates a new share under the specified account as described by request body. The share - resource includes metadata and properties for that share. It does not include a list of the - files contained by the share. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. 
- Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param file_share: Properties of the file share to create. Required. - :type file_share: ~azure.mgmt.storage.models.FileShare - :param expand: Optional, used to expand the properties within share's properties. Valid values - are: snapshots. Should be passed as a string with delimiter ','. Default value is None. - :type expand: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def create( - self, - resource_group_name: str, - account_name: str, - share_name: str, - file_share: IO[bytes], - expand: Optional[str] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.FileShare: - """Creates a new share under the specified account as described by request body. The share - resource includes metadata and properties for that share. It does not include a list of the - files contained by the share. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. 
Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param file_share: Properties of the file share to create. Required. - :type file_share: IO[bytes] - :param expand: Optional, used to expand the properties within share's properties. Valid values - are: snapshots. Should be passed as a string with delimiter ','. Default value is None. - :type expand: str - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def create( - self, - resource_group_name: str, - account_name: str, - share_name: str, - file_share: Union[_models.FileShare, IO[bytes]], - expand: Optional[str] = None, - **kwargs: Any - ) -> _models.FileShare: - """Creates a new share under the specified account as described by request body. The share - resource includes metadata and properties for that share. It does not include a list of the - files contained by the share. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. 
File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param file_share: Properties of the file share to create. Is either a FileShare type or a - IO[bytes] type. Required. - :type file_share: ~azure.mgmt.storage.models.FileShare or IO[bytes] - :param expand: Optional, used to expand the properties within share's properties. Valid values - are: snapshots. Should be passed as a string with delimiter ','. Default value is None. - :type expand: str - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(file_share, (IOBase, bytes)): - _content = file_share - else: - _json = self._serialize.body(file_share, "FileShare") - - _request = build_create_request( - resource_group_name=resource_group_name, - account_name=account_name, - share_name=share_name, - subscription_id=self._config.subscription_id, - expand=expand, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = 
self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileShare", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def update( - self, - resource_group_name: str, - account_name: str, - share_name: str, - file_share: _models.FileShare, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.FileShare: - """Updates share properties as specified in request body. Properties not mentioned in the request - will not be changed. Update fails if the specified share does not already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param file_share: Properties to update for the file share. Required. 
- :type file_share: ~azure.mgmt.storage.models.FileShare - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def update( - self, - resource_group_name: str, - account_name: str, - share_name: str, - file_share: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.FileShare: - """Updates share properties as specified in request body. Properties not mentioned in the request - will not be changed. Update fails if the specified share does not already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param file_share: Properties to update for the file share. Required. - :type file_share: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def update( - self, - resource_group_name: str, - account_name: str, - share_name: str, - file_share: Union[_models.FileShare, IO[bytes]], - **kwargs: Any - ) -> _models.FileShare: - """Updates share properties as specified in request body. Properties not mentioned in the request - will not be changed. Update fails if the specified share does not already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param file_share: Properties to update for the file share. Is either a FileShare type or a - IO[bytes] type. Required. 
- :type file_share: ~azure.mgmt.storage.models.FileShare or IO[bytes] - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(file_share, (IOBase, bytes)): - _content = file_share - else: - _json = self._serialize.body(file_share, "FileShare") - - _request = build_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - share_name=share_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileShare", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: 
ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get( - self, - resource_group_name: str, - account_name: str, - share_name: str, - expand: Optional[str] = None, - x_ms_snapshot: Optional[str] = None, - **kwargs: Any - ) -> _models.FileShare: - """Gets properties of a specified share. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param expand: Optional, used to expand the properties within share's properties. Valid values - are: stats. Should be passed as a string with delimiter ','. Default value is None. - :type expand: str - :param x_ms_snapshot: Optional, used to retrieve properties of a snapshot. Default value is - None. 
- :type x_ms_snapshot: str - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - share_name=share_name, - subscription_id=self._config.subscription_id, - expand=expand, - x_ms_snapshot=x_ms_snapshot, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileShare", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def delete( - self, - resource_group_name: str, - account_name: str, - share_name: str, - x_ms_snapshot: Optional[str] = None, - include: Optional[str] = None, - **kwargs: Any - ) -> None: - """Deletes specified share under its account. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param x_ms_snapshot: Optional, used to delete a snapshot. Default value is None. - :type x_ms_snapshot: str - :param include: Optional. Valid values are: snapshots, leased-snapshots, none. The default - value is snapshots. For 'snapshots', the file share is deleted including all of its file share - snapshots. If the file share contains leased-snapshots, the deletion fails. For - 'leased-snapshots', the file share is deleted included all of its file share snapshots - (leased/unleased). For 'none', the file share is deleted if it has no share snapshots. If the - file share contains any snapshots (leased or unleased), the deletion fails. Default value is - None. 
- :type include: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - share_name=share_name, - subscription_id=self._config.subscription_id, - x_ms_snapshot=x_ms_snapshot, - include=include, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @overload - async def restore( - self, - resource_group_name: str, - account_name: str, - share_name: str, - deleted_share: _models.DeletedShare, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> None: - """Restore a file share within a valid retention days if share soft delete is enabled. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param deleted_share: Required. - :type deleted_share: ~azure.mgmt.storage.models.DeletedShare - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def restore( - self, - resource_group_name: str, - account_name: str, - share_name: str, - deleted_share: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> None: - """Restore a file share within a valid retention days if share soft delete is enabled. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. 
Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param deleted_share: Required. - :type deleted_share: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def restore( - self, - resource_group_name: str, - account_name: str, - share_name: str, - deleted_share: Union[_models.DeletedShare, IO[bytes]], - **kwargs: Any - ) -> None: - """Restore a file share within a valid retention days if share soft delete is enabled. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param deleted_share: Is either a DeletedShare type or a IO[bytes] type. Required. 
- :type deleted_share: ~azure.mgmt.storage.models.DeletedShare or IO[bytes] - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(deleted_share, (IOBase, bytes)): - _content = deleted_share - else: - _json = self._serialize.body(deleted_share, "DeletedShare") - - _request = build_restore_request( - resource_group_name=resource_group_name, - account_name=account_name, - share_name=share_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @overload - async def lease( - self, - resource_group_name: str, - account_name: str, - share_name: str, - 
x_ms_snapshot: Optional[str] = None, - parameters: Optional[_models.LeaseShareRequest] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LeaseShareResponse: - """The Lease Share operation establishes and manages a lock on a share for delete operations. The - lock duration can be 15 to 60 seconds, or can be infinite. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default value is - None. - :type x_ms_snapshot: str - :param parameters: Lease Share request body. Default value is None. - :type parameters: ~azure.mgmt.storage.models.LeaseShareRequest - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: LeaseShareResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LeaseShareResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def lease( - self, - resource_group_name: str, - account_name: str, - share_name: str, - x_ms_snapshot: Optional[str] = None, - parameters: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LeaseShareResponse: - """The Lease Share operation establishes and manages a lock on a share for delete operations. The - lock duration can be 15 to 60 seconds, or can be infinite. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default value is - None. - :type x_ms_snapshot: str - :param parameters: Lease Share request body. Default value is None. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: LeaseShareResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LeaseShareResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def lease( - self, - resource_group_name: str, - account_name: str, - share_name: str, - x_ms_snapshot: Optional[str] = None, - parameters: Optional[Union[_models.LeaseShareRequest, IO[bytes]]] = None, - **kwargs: Any - ) -> _models.LeaseShareResponse: - """The Lease Share operation establishes and manages a lock on a share for delete operations. The - lock duration can be 15 to 60 seconds, or can be infinite. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default value is - None. - :type x_ms_snapshot: str - :param parameters: Lease Share request body. Is either a LeaseShareRequest type or a IO[bytes] - type. Default value is None. 
- :type parameters: ~azure.mgmt.storage.models.LeaseShareRequest or IO[bytes] - :return: LeaseShareResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LeaseShareResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - content_type = content_type if parameters else None - cls: ClsType[_models.LeaseShareResponse] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if parameters else None - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - if parameters is not None: - _json = self._serialize.body(parameters, "LeaseShareRequest") - else: - _json = None - - _request = build_lease_request( - resource_group_name=resource_group_name, - account_name=account_name, - share_name=share_name, - subscription_id=self._config.subscription_id, - x_ms_snapshot=x_ms_snapshot, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("LeaseShareResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_local_users_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_local_users_operations.py deleted file mode 100644 index ed461d50a01a..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_local_users_operations.py +++ /dev/null @@ -1,587 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._local_users_operations import ( - build_create_or_update_request, - build_delete_request, - build_get_request, - build_list_keys_request, - build_list_request, - build_regenerate_password_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class LocalUsersOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`local_users` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - maxpagesize: Optional[int] = None, - filter: Optional[str] = None, - include: Optional[Union[str, _models.ListLocalUserIncludeParam]] = None, - **kwargs: Any - ) -> AsyncItemPaged["_models.LocalUser"]: - """List the local users associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional, specifies the maximum number of local users that will be included - in the list response. Default value is None. - :type maxpagesize: int - :param filter: Optional. When specified, only local user names starting with the filter will be - listed. Default value is None. - :type filter: str - :param include: Optional, when specified, will list local users enabled for the specific - protocol. Lists all users by default. "nfsv3" Default value is None. 
- :type include: str or ~azure.mgmt.storage.models.ListLocalUserIncludeParam - :return: An iterator like instance of either LocalUser or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.LocalUser] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.LocalUsers] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - include=include, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("LocalUsers", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # 
type: ignore - return None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - @distributed_trace_async - async def get(self, resource_group_name: str, account_name: str, username: str, **kwargs: Any) -> _models.LocalUser: - """Get the local user of the storage account by username. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. 
- :type username: str - :return: LocalUser or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LocalUser - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.LocalUser] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - username=username, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LocalUser", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def create_or_update( - self, - resource_group_name: str, - account_name: str, - username: str, - properties: _models.LocalUser, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LocalUser: - """Create or update the properties of a local user 
associated with the storage account. Properties - for NFSv3 enablement and extended groups cannot be set with other properties. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. - :type username: str - :param properties: The local user associated with a storage account. Required. - :type properties: ~azure.mgmt.storage.models.LocalUser - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: LocalUser or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LocalUser - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def create_or_update( - self, - resource_group_name: str, - account_name: str, - username: str, - properties: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LocalUser: - """Create or update the properties of a local user associated with the storage account. Properties - for NFSv3 enablement and extended groups cannot be set with other properties. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. - :type username: str - :param properties: The local user associated with a storage account. Required. - :type properties: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: LocalUser or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LocalUser - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def create_or_update( - self, - resource_group_name: str, - account_name: str, - username: str, - properties: Union[_models.LocalUser, IO[bytes]], - **kwargs: Any - ) -> _models.LocalUser: - """Create or update the properties of a local user associated with the storage account. Properties - for NFSv3 enablement and extended groups cannot be set with other properties. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. - :type username: str - :param properties: The local user associated with a storage account. Is either a LocalUser type - or a IO[bytes] type. Required. 
- :type properties: ~azure.mgmt.storage.models.LocalUser or IO[bytes] - :return: LocalUser or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LocalUser - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.LocalUser] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties - else: - _json = self._serialize.body(properties, "LocalUser") - - _request = build_create_or_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - username=username, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = 
self._deserialize("LocalUser", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def delete(self, resource_group_name: str, account_name: str, username: str, **kwargs: Any) -> None: - """Deletes the local user associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. - :type username: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - username=username, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # 
pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @distributed_trace_async - async def list_keys( - self, resource_group_name: str, account_name: str, username: str, **kwargs: Any - ) -> _models.LocalUserKeys: - """List SSH authorized keys and shared key of the local user. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. 
- :type username: str - :return: LocalUserKeys or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LocalUserKeys - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.LocalUserKeys] = kwargs.pop("cls", None) - - _request = build_list_keys_request( - resource_group_name=resource_group_name, - account_name=account_name, - username=username, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LocalUserKeys", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def regenerate_password( - self, resource_group_name: str, account_name: str, username: str, **kwargs: Any - ) -> _models.LocalUserRegeneratePasswordResult: - """Regenerate the local user SSH password. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. - :type username: str - :return: LocalUserRegeneratePasswordResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LocalUserRegeneratePasswordResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.LocalUserRegeneratePasswordResult] = kwargs.pop("cls", None) - - _request = build_regenerate_password_request( - resource_group_name=resource_group_name, - account_name=account_name, - username=username, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = 
self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LocalUserRegeneratePasswordResult", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_management_policies_operations.py deleted file mode 100644 index fad6fdf2f0ec..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_management_policies_operations.py +++ /dev/null @@ -1,337 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload - -from azure.core import AsyncPipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._management_policies_operations import ( - build_create_or_update_request, - build_delete_request, - build_get_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class ManagementPoliciesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`management_policies` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def get( - self, - resource_group_name: str, - account_name: str, - management_policy_name: Union[str, _models.ManagementPolicyName], - **kwargs: Any - ) -> _models.ManagementPolicy: - """Gets the managementpolicy associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param management_policy_name: The name of the Storage Account Management Policy. It should - always be 'default'. "default" Required. - :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName - :return: ManagementPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ManagementPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ManagementPolicy] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - management_policy_name=management_policy_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ManagementPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def create_or_update( - self, - resource_group_name: str, - account_name: str, - management_policy_name: 
Union[str, _models.ManagementPolicyName], - properties: _models.ManagementPolicy, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ManagementPolicy: - """Sets the managementpolicy to the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param management_policy_name: The name of the Storage Account Management Policy. It should - always be 'default'. "default" Required. - :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName - :param properties: The ManagementPolicy set to a storage account. Required. - :type properties: ~azure.mgmt.storage.models.ManagementPolicy - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: ManagementPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ManagementPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def create_or_update( - self, - resource_group_name: str, - account_name: str, - management_policy_name: Union[str, _models.ManagementPolicyName], - properties: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ManagementPolicy: - """Sets the managementpolicy to the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. 
- Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param management_policy_name: The name of the Storage Account Management Policy. It should - always be 'default'. "default" Required. - :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName - :param properties: The ManagementPolicy set to a storage account. Required. - :type properties: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: ManagementPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ManagementPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def create_or_update( - self, - resource_group_name: str, - account_name: str, - management_policy_name: Union[str, _models.ManagementPolicyName], - properties: Union[_models.ManagementPolicy, IO[bytes]], - **kwargs: Any - ) -> _models.ManagementPolicy: - """Sets the managementpolicy to the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param management_policy_name: The name of the Storage Account Management Policy. It should - always be 'default'. "default" Required. - :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName - :param properties: The ManagementPolicy set to a storage account. Is either a ManagementPolicy - type or a IO[bytes] type. Required. 
- :type properties: ~azure.mgmt.storage.models.ManagementPolicy or IO[bytes] - :return: ManagementPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ManagementPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.ManagementPolicy] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties - else: - _json = self._serialize.body(properties, "ManagementPolicy") - - _request = build_create_or_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - management_policy_name=management_policy_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ManagementPolicy", 
pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def delete( - self, - resource_group_name: str, - account_name: str, - management_policy_name: Union[str, _models.ManagementPolicyName], - **kwargs: Any - ) -> None: - """Deletes the managementpolicy associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param management_policy_name: The name of the Storage Account Management Policy. It should - always be 'default'. "default" Required. - :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - management_policy_name=management_policy_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) 
- - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_network_security_perimeter_configurations_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_network_security_perimeter_configurations_operations.py deleted file mode 100644 index 84e7a0afdf2f..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_network_security_perimeter_configurations_operations.py +++ /dev/null @@ -1,355 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, AsyncIterator, Callable, Optional, TypeVar, Union, cast -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._network_security_perimeter_configurations_operations import ( - build_get_request, - build_list_request, - build_reconcile_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class NetworkSecurityPerimeterConfigurationsOperations: # pylint: disable=name-too-long - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`network_security_perimeter_configurations` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> AsyncItemPaged["_models.NetworkSecurityPerimeterConfiguration"]: - """Gets list of effective NetworkSecurityPerimeterConfiguration for storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: An iterator like instance of either NetworkSecurityPerimeterConfiguration or the - result of cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.NetworkSecurityPerimeterConfiguration] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.NetworkSecurityPerimeterConfigurationList] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("NetworkSecurityPerimeterConfigurationList", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = 
cls(list_of_elem) # type: ignore - return None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - @distributed_trace_async - async def get( - self, - resource_group_name: str, - account_name: str, - network_security_perimeter_configuration_name: str, - **kwargs: Any - ) -> _models.NetworkSecurityPerimeterConfiguration: - """Gets effective NetworkSecurityPerimeterConfiguration for association. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param network_security_perimeter_configuration_name: The name for Network Security Perimeter - configuration. Required. 
- :type network_security_perimeter_configuration_name: str - :return: NetworkSecurityPerimeterConfiguration or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.NetworkSecurityPerimeterConfiguration - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.NetworkSecurityPerimeterConfiguration] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("NetworkSecurityPerimeterConfiguration", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - async def _reconcile_initial( - self, - 
resource_group_name: str, - account_name: str, - network_security_perimeter_configuration_name: str, - **kwargs: Any - ) -> AsyncIterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - _request = build_reconcile_request( - resource_group_name=resource_group_name, - account_name=account_name, - network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, 
deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def begin_reconcile( - self, - resource_group_name: str, - account_name: str, - network_security_perimeter_configuration_name: str, - **kwargs: Any - ) -> AsyncLROPoller[None]: - """Refreshes any information about the association. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param network_security_perimeter_configuration_name: The name for Network Security Perimeter - configuration. Required. - :type network_security_perimeter_configuration_name: str - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._reconcile_initial( - resource_group_name=resource_group_name, - account_name=account_name, - network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await 
raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_object_replication_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_object_replication_policies_operations.py deleted file mode 100644 index 8768698a500b..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_object_replication_policies_operations.py +++ /dev/null @@ -1,449 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._object_replication_policies_operations import ( - build_create_or_update_request, - build_delete_request, - build_get_request, - build_list_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class ObjectReplicationPoliciesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`object_replication_policies` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> AsyncItemPaged["_models.ObjectReplicationPolicy"]: - """List the object replication policies associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: An iterator like instance of either ObjectReplicationPolicy or the result of - cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.ObjectReplicationPolicy] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ObjectReplicationPolicies] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("ObjectReplicationPolicies", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, AsyncList(list_of_elem) - - 
async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - @distributed_trace_async - async def get( - self, resource_group_name: str, account_name: str, object_replication_policy_id: str, **kwargs: Any - ) -> _models.ObjectReplicationPolicy: - """Get the object replication policy of the storage account by policy ID. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param object_replication_policy_id: For the destination account, provide the value 'default'. - Configure the policy on the destination account first. For the source account, provide the - value of the policy ID that is returned when you download the policy that was defined on the - destination account. The policy is downloaded as a JSON file. Required. 
- :type object_replication_policy_id: str - :return: ObjectReplicationPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ObjectReplicationPolicy] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - object_replication_policy_id=object_replication_policy_id, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ObjectReplicationPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def create_or_update( - self, - resource_group_name: str, - account_name: str, - object_replication_policy_id: str, - properties: 
_models.ObjectReplicationPolicy, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ObjectReplicationPolicy: - """Create or update the object replication policy of the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param object_replication_policy_id: For the destination account, provide the value 'default'. - Configure the policy on the destination account first. For the source account, provide the - value of the policy ID that is returned when you download the policy that was defined on the - destination account. The policy is downloaded as a JSON file. Required. - :type object_replication_policy_id: str - :param properties: The object replication policy set to a storage account. A unique policy ID - will be created if absent. Required. - :type properties: ~azure.mgmt.storage.models.ObjectReplicationPolicy - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: ObjectReplicationPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def create_or_update( - self, - resource_group_name: str, - account_name: str, - object_replication_policy_id: str, - properties: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ObjectReplicationPolicy: - """Create or update the object replication policy of the storage account. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param object_replication_policy_id: For the destination account, provide the value 'default'. - Configure the policy on the destination account first. For the source account, provide the - value of the policy ID that is returned when you download the policy that was defined on the - destination account. The policy is downloaded as a JSON file. Required. - :type object_replication_policy_id: str - :param properties: The object replication policy set to a storage account. A unique policy ID - will be created if absent. Required. - :type properties: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: ObjectReplicationPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def create_or_update( - self, - resource_group_name: str, - account_name: str, - object_replication_policy_id: str, - properties: Union[_models.ObjectReplicationPolicy, IO[bytes]], - **kwargs: Any - ) -> _models.ObjectReplicationPolicy: - """Create or update the object replication policy of the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. 
- Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param object_replication_policy_id: For the destination account, provide the value 'default'. - Configure the policy on the destination account first. For the source account, provide the - value of the policy ID that is returned when you download the policy that was defined on the - destination account. The policy is downloaded as a JSON file. Required. - :type object_replication_policy_id: str - :param properties: The object replication policy set to a storage account. A unique policy ID - will be created if absent. Is either a ObjectReplicationPolicy type or a IO[bytes] type. - Required. - :type properties: ~azure.mgmt.storage.models.ObjectReplicationPolicy or IO[bytes] - :return: ObjectReplicationPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.ObjectReplicationPolicy] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties - else: - _json = self._serialize.body(properties, "ObjectReplicationPolicy") - - _request = build_create_or_update_request( - resource_group_name=resource_group_name, - account_name=account_name, 
- object_replication_policy_id=object_replication_policy_id, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ObjectReplicationPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def delete( - self, resource_group_name: str, account_name: str, object_replication_policy_id: str, **kwargs: Any - ) -> None: - """Deletes the object replication policy associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param object_replication_policy_id: For the destination account, provide the value 'default'. - Configure the policy on the destination account first. 
For the source account, provide the - value of the policy ID that is returned when you download the policy that was defined on the - destination account. The policy is downloaded as a JSON file. Required. - :type object_replication_policy_id: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - object_replication_policy_id=object_replication_policy_id, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_operations.py index 
9c08948ad61e..697f481e1505 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_operations.py @@ -1,37 +1,166 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar +from io import IOBase +import json +from typing import Any, AsyncIterator, Callable, IO, Literal, Optional, TypeVar, Union, cast, overload import urllib.parse -from azure.core import AsyncPipelineClient +from azure.core import AsyncPipelineClient, MatchConditions from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, + ResourceModifiedError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models +from ..._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize from ..._utils.serialization import Deserializer, Serializer -from ...operations._operations import build_list_request +from ..._validation import api_version_validation +from ...operations._operations import ( + build_blob_containers_clear_legal_hold_request, + build_blob_containers_create_or_update_immutability_policy_request, + build_blob_containers_create_request, + build_blob_containers_delete_immutability_policy_request, + build_blob_containers_delete_request, + build_blob_containers_extend_immutability_policy_request, + build_blob_containers_get_immutability_policy_request, + build_blob_containers_get_request, + build_blob_containers_lease_request, + build_blob_containers_list_request, + build_blob_containers_lock_immutability_policy_request, + build_blob_containers_object_level_worm_request, + build_blob_containers_set_legal_hold_request, + build_blob_containers_update_request, + build_blob_inventory_policies_create_or_update_request, + build_blob_inventory_policies_delete_request, + build_blob_inventory_policies_get_request, + build_blob_inventory_policies_list_request, + build_blob_services_get_service_properties_request, + build_blob_services_list_request, + build_blob_services_set_service_properties_request, + build_connectors_create_request, + build_connectors_delete_request, + build_connectors_get_request, + build_connectors_list_by_storage_account_request, + build_connectors_test_existing_connection_request, + build_connectors_update_request, + build_data_shares_create_request, + build_data_shares_delete_request, + build_data_shares_get_request, + build_data_shares_list_by_storage_account_request, + build_data_shares_update_request, + build_deleted_accounts_get_request, + build_deleted_accounts_list_request, + build_encryption_scopes_get_request, + build_encryption_scopes_list_request, + build_encryption_scopes_patch_request, + 
build_encryption_scopes_put_request, + build_file_services_get_service_properties_request, + build_file_services_get_service_usage_request, + build_file_services_list_request, + build_file_services_list_service_usages_request, + build_file_services_set_service_properties_request, + build_file_shares_create_request, + build_file_shares_delete_request, + build_file_shares_get_request, + build_file_shares_lease_request, + build_file_shares_list_request, + build_file_shares_restore_request, + build_file_shares_update_request, + build_local_users_create_or_update_request, + build_local_users_delete_request, + build_local_users_get_request, + build_local_users_list_keys_request, + build_local_users_list_request, + build_local_users_regenerate_password_request, + build_management_policies_create_or_update_request, + build_management_policies_delete_request, + build_management_policies_get_request, + build_network_security_perimeter_configurations_get_request, + build_network_security_perimeter_configurations_list_request, + build_network_security_perimeter_configurations_reconcile_request, + build_object_replication_policies_create_or_update_request, + build_object_replication_policies_delete_request, + build_object_replication_policies_get_request, + build_object_replication_policies_list_request, + build_operations_list_request, + build_private_endpoint_connections_delete_request, + build_private_endpoint_connections_get_request, + build_private_endpoint_connections_list_request, + build_private_endpoint_connections_put_request, + build_private_link_resources_list_by_storage_account_request, + build_queue_create_request, + build_queue_delete_request, + build_queue_get_request, + build_queue_list_request, + build_queue_services_get_service_properties_request, + build_queue_services_list_request, + build_queue_services_set_service_properties_request, + build_queue_update_request, + build_skus_list_request, + 
build_storage_accounts_abort_hierarchical_namespace_migration_request, + build_storage_accounts_check_name_availability_request, + build_storage_accounts_create_request, + build_storage_accounts_customer_initiated_migration_request, + build_storage_accounts_delete_request, + build_storage_accounts_failover_request, + build_storage_accounts_get_customer_initiated_migration_request, + build_storage_accounts_get_properties_request, + build_storage_accounts_hierarchical_namespace_migration_request, + build_storage_accounts_list_account_sas_request, + build_storage_accounts_list_by_resource_group_request, + build_storage_accounts_list_keys_request, + build_storage_accounts_list_request, + build_storage_accounts_list_service_sas_request, + build_storage_accounts_regenerate_key_request, + build_storage_accounts_restore_blob_ranges_request, + build_storage_accounts_revoke_user_delegation_keys_request, + build_storage_accounts_update_request, + build_storage_task_assignment_instances_report_list_request, + build_storage_task_assignments_create_request, + build_storage_task_assignments_delete_request, + build_storage_task_assignments_get_request, + build_storage_task_assignments_instances_report_list_request, + build_storage_task_assignments_list_request, + build_storage_task_assignments_stop_assignment_request, + build_storage_task_assignments_update_request, + build_table_create_request, + build_table_delete_request, + build_table_get_request, + build_table_list_request, + build_table_services_get_service_properties_request, + build_table_services_list_request, + build_table_services_set_service_properties_request, + build_table_update_request, + build_usages_list_by_location_request, +) from .._configuration import StorageManagementClientConfiguration T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +JSON = MutableMapping[str, Any] List = list @@ -45,8 +174,6 @@ class Operations: :attr:`operations` 
attribute. """ - models = _models - def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") @@ -58,15 +185,14 @@ def __init__(self, *args, **kwargs) -> None: def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Operation"]: """Lists all of the available Storage Rest API operations. - :return: An iterator like instance of either Operation or the result of cls(response) + :return: An iterator like instance of Operation :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.Operation] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + _params = kwargs.pop("params", {}) or {} - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) + cls: ClsType[List[_models.Operation]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -79,12 +205,17 @@ def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Operation"]: def prepare_request(next_link=None): if not next_link: - _request = build_list_request( - api_version=api_version, + _request = build_operations_list_request( + api_version=self._config.api_version, headers=_headers, params=_params, ) - _request.url = self._client.format_url(_request.url) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) else: # make call to next link with the client's api-version @@ -99,16 +230,24 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request.url = 
self._client.format_url(_request.url) - _request.method = "GET" + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + return _request async def extract_data(pipeline_response): - deserialized = self._deserialize("OperationListResult", pipeline_response) - list_of_elem = deserialized.value + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Operation], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return None, AsyncList(list_of_elem) + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) async def get_next(next_link=None): _request = prepare_request(next_link) @@ -121,7 +260,16403 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class BlobContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`blob_containers` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any + ) -> _models.BlobContainer: + """Gets properties of a specified container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BlobContainer] = kwargs.pop("cls", None) + + _request = build_blob_containers_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobContainer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # 
type: ignore + + return deserialized # type: ignore + + @overload + async def create( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: _models.BlobContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobContainer: + """Creates a new container under the specified account as described by request body. The container + resource includes metadata and properties for that container. It does not include a list of the + blobs contained by the container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties of the blob container to create. Required. + :type blob_container: ~azure.mgmt.storage.models.BlobContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobContainer: + """Creates a new container under the specified account as described by request body. The container + resource includes metadata and properties for that container. It does not include a list of the + blobs contained by the container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties of the blob container to create. Required. + :type blob_container: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobContainer: + """Creates a new container under the specified account as described by request body. The container + resource includes metadata and properties for that container. It does not include a list of the + blobs contained by the container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties of the blob container to create. Required. + :type blob_container: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: Union[_models.BlobContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.BlobContainer: + """Creates a new container under the specified account as described by request body. The container + resource includes metadata and properties for that container. It does not include a list of the + blobs contained by the container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties of the blob container to create. Is one of the following + types: BlobContainer, JSON, IO[bytes] Required. + :type blob_container: ~azure.mgmt.storage.models.BlobContainer or JSON or IO[bytes] + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BlobContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(blob_container, (IOBase, bytes)): + _content = blob_container + else: + _content = json.dumps(blob_container, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_containers_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobContainer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: _models.BlobContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobContainer: + """Updates container properties as specified in request body. Properties not mentioned in the + request will be unchanged. Update fails if the specified container doesn't already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties to update for the blob container. Required. + :type blob_container: ~azure.mgmt.storage.models.BlobContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobContainer: + """Updates container properties as specified in request body. Properties not mentioned in the + request will be unchanged. Update fails if the specified container doesn't already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties to update for the blob container. Required. + :type blob_container: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobContainer: + """Updates container properties as specified in request body. Properties not mentioned in the + request will be unchanged. Update fails if the specified container doesn't already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties to update for the blob container. Required. + :type blob_container: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: Union[_models.BlobContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.BlobContainer: + """Updates container properties as specified in request body. Properties not mentioned in the + request will be unchanged. Update fails if the specified container doesn't already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties to update for the blob container. Is one of the following + types: BlobContainer, JSON, IO[bytes] Required. + :type blob_container: ~azure.mgmt.storage.models.BlobContainer or JSON or IO[bytes] + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BlobContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(blob_container, (IOBase, bytes)): + _content = blob_container + else: + _content = json.dumps(blob_container, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_containers_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobContainer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete(self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any) -> None: + """Deletes specified container under its account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. 
+ :type container_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_containers_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @overload + async def set_legal_hold( + self, + resource_group_name: str, + account_name: str, + container_name: str, + legal_hold: _models.LegalHold, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LegalHold: + """Sets legal hold tags. Setting the same tag results in an idempotent operation. 
    @overload
    async def set_legal_hold(
        self,
        resource_group_name: str,
        account_name: str,
        container_name: str,
        legal_hold: _models.LegalHold,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.LegalHold:
        """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold
        follows an append pattern and does not clear out the existing tags that are not specified in
        the request.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :param legal_hold: The LegalHold property that will be set to a blob container. Required.
        :type legal_hold: ~azure.mgmt.storage.models.LegalHold
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: LegalHold. The LegalHold is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LegalHold
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def set_legal_hold(
        self,
        resource_group_name: str,
        account_name: str,
        container_name: str,
        legal_hold: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.LegalHold:
        """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold
        follows an append pattern and does not clear out the existing tags that are not specified in
        the request.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :param legal_hold: The LegalHold property that will be set to a blob container. Required.
        :type legal_hold: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: LegalHold. The LegalHold is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LegalHold
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def set_legal_hold(
        self,
        resource_group_name: str,
        account_name: str,
        container_name: str,
        legal_hold: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.LegalHold:
        """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold
        follows an append pattern and does not clear out the existing tags that are not specified in
        the request.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :param legal_hold: The LegalHold property that will be set to a blob container. Required.
        :type legal_hold: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: LegalHold. The LegalHold is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LegalHold
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def set_legal_hold(
        self,
        resource_group_name: str,
        account_name: str,
        container_name: str,
        legal_hold: Union[_models.LegalHold, JSON, IO[bytes]],
        **kwargs: Any
    ) -> _models.LegalHold:
        """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold
        follows an append pattern and does not clear out the existing tags that are not specified in
        the request.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :param legal_hold: The LegalHold property that will be set to a blob container. Is one of the
         following types: LegalHold, JSON, IO[bytes] Required.
        :type legal_hold: ~azure.mgmt.storage.models.LegalHold or JSON or IO[bytes]
        :return: LegalHold. The LegalHold is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LegalHold
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Well-known HTTP status -> exception mapping; callers may extend it via
        # the ``error_map`` keyword argument.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        # An explicit Content-Type header wins over the ``content_type`` kwarg;
        # both default to JSON below.
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.LegalHold] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        # Stream/bytes bodies are passed through untouched; model or dict bodies
        # are JSON-serialized here (read-only properties excluded).
        if isinstance(legal_hold, (IOBase, bytes)):
            _content = legal_hold
        else:
            _content = json.dumps(legal_hold, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_blob_containers_set_legal_hold_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            container_name=container_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # Expand the {endpoint} template with the configured base URL.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                # Drain the streamed body so the connection is closed cleanly
                # before raising; ignore already-consumed/closed streams.
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # Streaming callers get the raw byte iterator; otherwise deserialize the
        # JSON payload into the LegalHold model.
        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.LegalHold, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore
    @overload
    async def clear_legal_hold(
        self,
        resource_group_name: str,
        account_name: str,
        container_name: str,
        legal_hold: _models.LegalHold,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.LegalHold:
        """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent
        operation. ClearLegalHold clears out only the specified tags in the request.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :param legal_hold: The LegalHold property that will be clear from a blob container. Required.
        :type legal_hold: ~azure.mgmt.storage.models.LegalHold
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: LegalHold. The LegalHold is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LegalHold
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def clear_legal_hold(
        self,
        resource_group_name: str,
        account_name: str,
        container_name: str,
        legal_hold: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.LegalHold:
        """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent
        operation. ClearLegalHold clears out only the specified tags in the request.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :param legal_hold: The LegalHold property that will be clear from a blob container. Required.
        :type legal_hold: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: LegalHold. The LegalHold is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LegalHold
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def clear_legal_hold(
        self,
        resource_group_name: str,
        account_name: str,
        container_name: str,
        legal_hold: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.LegalHold:
        """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent
        operation. ClearLegalHold clears out only the specified tags in the request.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :param legal_hold: The LegalHold property that will be clear from a blob container. Required.
        :type legal_hold: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: LegalHold. The LegalHold is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LegalHold
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def clear_legal_hold(
        self,
        resource_group_name: str,
        account_name: str,
        container_name: str,
        legal_hold: Union[_models.LegalHold, JSON, IO[bytes]],
        **kwargs: Any
    ) -> _models.LegalHold:
        """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent
        operation. ClearLegalHold clears out only the specified tags in the request.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :param legal_hold: The LegalHold property that will be clear from a blob container. Is one of
         the following types: LegalHold, JSON, IO[bytes] Required.
        :type legal_hold: ~azure.mgmt.storage.models.LegalHold or JSON or IO[bytes]
        :return: LegalHold. The LegalHold is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LegalHold
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Well-known HTTP status -> exception mapping; extendable via ``error_map``.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        # Explicit Content-Type header wins over the ``content_type`` kwarg.
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.LegalHold] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        # Stream/bytes bodies pass through untouched; model/dict bodies are
        # JSON-serialized (read-only properties excluded).
        if isinstance(legal_hold, (IOBase, bytes)):
            _content = legal_hold
        else:
            _content = json.dumps(legal_hold, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_blob_containers_clear_legal_hold_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            container_name=container_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # Expand the {endpoint} template with the configured base URL.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                # Drain the streamed body so the socket is released before raising.
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # Streaming callers get raw bytes; otherwise deserialize into LegalHold.
        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.LegalHold, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore
    @overload
    async def lease(
        self,
        resource_group_name: str,
        account_name: str,
        container_name: str,
        parameters: Optional[_models.LeaseContainerRequest] = None,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.LeaseContainerResponse:
        """The Lease Container operation establishes and manages a lock on a container for delete
        operations. The lock duration can be 15 to 60 seconds, or can be infinite.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :param parameters: The content of the action request. Default value is None.
        :type parameters: ~azure.mgmt.storage.models.LeaseContainerRequest
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: LeaseContainerResponse. The LeaseContainerResponse is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def lease(
        self,
        resource_group_name: str,
        account_name: str,
        container_name: str,
        parameters: Optional[JSON] = None,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.LeaseContainerResponse:
        """The Lease Container operation establishes and manages a lock on a container for delete
        operations. The lock duration can be 15 to 60 seconds, or can be infinite.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :param parameters: The content of the action request. Default value is None.
        :type parameters: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: LeaseContainerResponse. The LeaseContainerResponse is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def lease(
        self,
        resource_group_name: str,
        account_name: str,
        container_name: str,
        parameters: Optional[IO[bytes]] = None,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.LeaseContainerResponse:
        """The Lease Container operation establishes and manages a lock on a container for delete
        operations. The lock duration can be 15 to 60 seconds, or can be infinite.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :param parameters: The content of the action request. Default value is None.
        :type parameters: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: LeaseContainerResponse. The LeaseContainerResponse is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def lease(
        self,
        resource_group_name: str,
        account_name: str,
        container_name: str,
        parameters: Optional[Union[_models.LeaseContainerRequest, JSON, IO[bytes]]] = None,
        **kwargs: Any
    ) -> _models.LeaseContainerResponse:
        """The Lease Container operation establishes and manages a lock on a container for delete
        operations. The lock duration can be 15 to 60 seconds, or can be infinite.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :param parameters: The content of the action request. Is one of the following types:
         LeaseContainerRequest, JSON, IO[bytes] Default value is None.
        :type parameters: ~azure.mgmt.storage.models.LeaseContainerRequest or JSON or IO[bytes]
        :return: LeaseContainerResponse. The LeaseContainerResponse is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Well-known HTTP status -> exception mapping; extendable via ``error_map``.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        # The request body is optional: a Content-Type is only sent when a body
        # is actually provided.
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        content_type = content_type if parameters else None
        cls: ClsType[_models.LeaseContainerResponse] = kwargs.pop("cls", None)

        # Parses as ``(content_type or "application/json") if parameters else None``
        # -- i.e. default to JSON only when a body is present.
        content_type = content_type or "application/json" if parameters else None
        _content = None
        # Stream/bytes bodies pass through untouched; model/dict bodies are
        # JSON-serialized; a missing body stays None.
        if isinstance(parameters, (IOBase, bytes)):
            _content = parameters
        else:
            if parameters is not None:
                _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
            else:
                _content = None

        _request = build_blob_containers_lease_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            container_name=container_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # Expand the {endpoint} template with the configured base URL.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                # Drain the streamed body so the socket is released before raising.
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # Streaming callers get raw bytes; otherwise deserialize into the model.
        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.LeaseContainerResponse, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore
    async def _object_level_worm_initial(
        self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Issue the initial request of the object-level-WORM long-running operation.

        Always runs the pipeline with ``stream=True`` so the LRO poller can
        consume (or discard) the body; returns the raw byte iterator. On a 202
        the ``Retry-After`` and ``Location`` headers are captured for polling.
        """
        # Well-known HTTP status -> exception mapping; extendable via ``error_map``.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        _request = build_blob_containers_object_level_worm_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            container_name=container_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        # Expand the {endpoint} template with the configured base URL.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        # Streaming is forced on for LRO initial calls.
        _stream = True
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            # Drain the streamed body so the socket is released before raising.
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # NOTE(review): this operation deserializes errors as CloudError while
            # the sibling operations use ErrorResponse -- presumably per the
            # service definition; confirm against the generator/spec.
            error = _failsafe_deserialize(
                _models.CloudError,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            # Polling hints for the asynchronous (accepted) path.
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace_async
    async def begin_object_level_worm(
        self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """This operation migrates a blob container from container level WORM to object level immutability
        enabled container. Prerequisites require a container level immutability policy either in locked
        or unlocked state, Account level versioning must be enabled and there should be no Legal hold
        on the container.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param container_name: The name of the blob container within the specified storage account.
         Blob container names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately
         preceded and followed by a letter or number. Required.
        :type container_name: str
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)
        # ``polling`` may be True (default ARM polling), False (no polling), or a
        # caller-supplied AsyncPollingMethod instance.
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            # Fresh LRO: issue the initial request; ``cls`` passthrough keeps the
            # raw pipeline response for the poller.
            raw_result = await self._object_level_worm_initial(
                resource_group_name=resource_group_name,
                account_name=account_name,
                container_name=container_name,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
            # Fully consume the streamed initial response before polling starts.
            await raw_result.http_response.read()  # type: ignore
            kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            # Final result is None; only the optional ``cls`` hook sees the response.
            if cls:
                return cls(pipeline_response, None, {})  # type: ignore

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            # Resume a previously started LRO from its continuation token.
            return AsyncLROPoller[None].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method)  # type: ignore
    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        account_name: str,
        *,
        filter: Optional[str] = None,
        include: Optional[Union[str, _models.ListContainersInclude]] = None,
        **kwargs: Any
    ) -> AsyncItemPaged["_models.ListContainerItem"]:
        """Lists all containers and does not support a prefix like data plane. Also SRP today does not
        return continuation token.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :keyword filter: Optional. When specified, only container names starting with the filter will
         be listed. Default value is None.
        :paramtype filter: str
        :keyword include: Optional, used to include the properties for soft deleted blob containers.
         "deleted" Default value is None.
        :paramtype include: str or ~azure.mgmt.storage.models.ListContainersInclude
        :return: An iterator like instance of ListContainerItem
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.ListContainerItem]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        maxpagesize = kwargs.pop("maxpagesize", None)
        cls: ClsType[List[_models.ListContainerItem]] = kwargs.pop("cls", None)

        # Well-known HTTP status -> exception mapping; extendable via ``error_map``.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the list request from the operation parameters.
            if not next_link:

                _request = build_blob_containers_list_request(
                    resource_group_name=resource_group_name,
                    account_name=account_name,
                    subscription_id=self._config.subscription_id,
                    maxpagesize=maxpagesize,
                    filter=filter,
                    include=include,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                # Re-quote the query values from the service-supplied nextLink and
                # force this client's api-version onto the follow-up request.
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        async def extract_data(pipeline_response):
            # Deserialize one page into models and hand back (next_link, items).
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(
                List[_models.ListContainerItem],
                deserialized.get("value", []),
            )
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("nextLink") or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page; non-200 responses are mapped/raised as ARM errors.
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(
                    _models.ErrorResponse,
                    response,
                )
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)
+ Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) + + _request = build_blob_containers_get_immutability_policy_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = 
kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ImmutabilityPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def create_or_update_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[_models.ImmutabilityPolicy] = None, + *, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but + not required for this operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. 
+ :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob + container. Default value is None. + :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[JSON] = None, + *, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but + not required for this operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob + container. Default value is None. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[IO[bytes]] = None, + *, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but + not required for this operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob + container. Default value is None. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[Union[_models.ImmutabilityPolicy, JSON, IO[bytes]]] = None, + *, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but + not required for this operation. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob + container. Is one of the following types: ImmutabilityPolicy, JSON, IO[bytes] Default value is + None. + :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy or JSON or IO[bytes] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. 
The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if parameters else None + cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" if parameters else None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_blob_containers_create_or_update_immutability_policy_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = 
kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ImmutabilityPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + *, + etag: str, + match_condition: MatchConditions, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Aborts an unlocked immutability policy. The response of delete has + immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this + operation. Deleting a locked immutability policy is not allowed, the only way is to delete the + container after deleting all expired blobs inside the policy locked container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Required. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Required. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) + + _request = build_blob_containers_delete_immutability_policy_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + 
"endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ImmutabilityPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def lock_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + *, + etag: str, + match_condition: MatchConditions, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is + ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Required. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Required. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) + + _request = build_blob_containers_lock_immutability_policy_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + 
"endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ImmutabilityPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def extend_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[_models.ImmutabilityPolicy] = None, + *, + etag: str, + match_condition: MatchConditions, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only + action allowed on a Locked policy will be this action. ETag in If-Match is required for this + operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The content of the action request. Default value is None. + :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy + :keyword etag: check if resource is changed. Set None to skip checking etag. Required. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Required. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def extend_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[JSON] = None, + *, + etag: str, + match_condition: MatchConditions, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only + action allowed on a Locked policy will be this action. ETag in If-Match is required for this + operation. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The content of the action request. Default value is None. + :type parameters: JSON + :keyword etag: check if resource is changed. Set None to skip checking etag. Required. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Required. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def extend_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[IO[bytes]] = None, + *, + etag: str, + match_condition: MatchConditions, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only + action allowed on a Locked policy will be this action. ETag in If-Match is required for this + operation. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The content of the action request. Default value is None. + :type parameters: IO[bytes] + :keyword etag: check if resource is changed. Set None to skip checking etag. Required. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Required. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def extend_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[Union[_models.ImmutabilityPolicy, JSON, IO[bytes]]] = None, + *, + etag: str, + match_condition: MatchConditions, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only + action allowed on a Locked policy will be this action. ETag in If-Match is required for this + operation. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The content of the action request. Is one of the following types: + ImmutabilityPolicy, JSON, IO[bytes] Default value is None. + :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy or JSON or IO[bytes] + :keyword etag: check if resource is changed. Set None to skip checking etag. Required. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Required. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. 
The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if parameters else None + cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" if parameters else None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_blob_containers_extend_immutability_policy_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + 
_stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ImmutabilityPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + +class BlobServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`blob_services` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get_service_properties( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> _models.BlobServiceProperties: + """Gets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: BlobServiceProperties. 
The BlobServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BlobServiceProperties] = kwargs.pop("cls", None) + + _request = build_blob_services_get_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, 
deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: _models.BlobServiceProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobServiceProperties: + """Sets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Blob service, including properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Required. + :type parameters: ~azure.mgmt.storage.models.BlobServiceProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobServiceProperties. The BlobServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobServiceProperties: + """Sets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Blob service, including properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobServiceProperties. The BlobServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobServiceProperties: + """Sets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Blob service, including properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: BlobServiceProperties. The BlobServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.BlobServiceProperties, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.BlobServiceProperties: + """Sets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Blob service, including properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Is one of the following + types: BlobServiceProperties, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.BlobServiceProperties or JSON or IO[bytes] + :return: BlobServiceProperties. 
The BlobServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BlobServiceProperties] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_services_set_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.BlobServiceProperties"]: + """List blob services of storage account. It returns a collection of one object named default. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. 
+ :type account_name: str + :return: An iterator like instance of BlobServiceProperties + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.BlobServiceProperties] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BlobServiceProperties]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_blob_services_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.BlobServiceProperties], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class StorageAccountsOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`storage_accounts` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @overload + async def check_name_availability( + self, + account_name: _models.StorageAccountCheckNameAvailabilityParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CheckNameAvailabilityResult: + """Checks that the storage account name is valid and is not already in use. + + :param account_name: The request body. Required. + :type account_name: ~azure.mgmt.storage.models.StorageAccountCheckNameAvailabilityParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: CheckNameAvailabilityResult. The CheckNameAvailabilityResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def check_name_availability( + self, account_name: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.CheckNameAvailabilityResult: + """Checks that the storage account name is valid and is not already in use. + + :param account_name: The request body. Required. + :type account_name: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: CheckNameAvailabilityResult. 
The CheckNameAvailabilityResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def check_name_availability( + self, account_name: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.CheckNameAvailabilityResult: + """Checks that the storage account name is valid and is not already in use. + + :param account_name: The request body. Required. + :type account_name: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: CheckNameAvailabilityResult. The CheckNameAvailabilityResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def check_name_availability( + self, account_name: Union[_models.StorageAccountCheckNameAvailabilityParameters, JSON, IO[bytes]], **kwargs: Any + ) -> _models.CheckNameAvailabilityResult: + """Checks that the storage account name is valid and is not already in use. + + :param account_name: The request body. Is one of the following types: + StorageAccountCheckNameAvailabilityParameters, JSON, IO[bytes] Required. + :type account_name: ~azure.mgmt.storage.models.StorageAccountCheckNameAvailabilityParameters or + JSON or IO[bytes] + :return: CheckNameAvailabilityResult. 
The CheckNameAvailabilityResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CheckNameAvailabilityResult] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(account_name, (IOBase, bytes)): + _content = account_name + else: + _content = json.dumps(account_name, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_check_name_availability_request( + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
_failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.CheckNameAvailabilityResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_properties( + self, + resource_group_name: str, + account_name: str, + *, + expand: Optional[Union[str, _models.StorageAccountExpand]] = None, + **kwargs: Any + ) -> _models.StorageAccount: + """Returns the properties for the specified storage account including but not limited to name, SKU + name, location, and account status. The ListKeys operation should be used to retrieve storage + keys. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword expand: May be used to expand the properties within account's properties. By default, + data is not included when fetching properties. Currently we only support geoReplicationStats + and blobRestoreStatus. Known values are: "geoReplicationStats" and "blobRestoreStatus". Default + value is None. + :paramtype expand: str or ~azure.mgmt.storage.models.StorageAccountExpand + :return: StorageAccount. 
The StorageAccount is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccount + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageAccount] = kwargs.pop("cls", None) + + _request = build_storage_accounts_get_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAccount, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # 
type: ignore + + return deserialized # type: ignore + + async def _create_initial( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.StorageAccountCreateParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create( + self, + resource_group_name: str, + account_name: str, + parameters: _models.StorageAccountCreateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAccount]: + """Asynchronously creates a new storage account with the specified parameters. If an account is + already created and a subsequent create request is issued with different properties, the + account properties will be updated. If an account is already created and a subsequent create or + update request is issued with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the created account. Required. + :type parameters: ~azure.mgmt.storage.models.StorageAccountCreateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
+ Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAccount. The StorageAccount is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAccount]: + """Asynchronously creates a new storage account with the specified parameters. If an account is + already created and a subsequent create request is issued with different properties, the + account properties will be updated. If an account is already created and a subsequent create or + update request is issued with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the created account. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAccount. 
The StorageAccount is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAccount]: + """Asynchronously creates a new storage account with the specified parameters. If an account is + already created and a subsequent create request is issued with different properties, the + account properties will be updated. If an account is already created and a subsequent create or + update request is issued with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the created account. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAccount. 
The StorageAccount is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.StorageAccountCreateParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAccount]: + """Asynchronously creates a new storage account with the specified parameters. If an account is + already created and a subsequent create request is issued with different properties, the + account properties will be updated. If an account is already created and a subsequent create or + update request is issued with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the created account. Is one of the following + types: StorageAccountCreateParameters, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.StorageAccountCreateParameters or JSON or + IO[bytes] + :return: An instance of AsyncLROPoller that returns StorageAccount. 
The StorageAccount is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAccount] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + account_name=account_name, + parameters=parameters, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageAccount, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.StorageAccount].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return 
AsyncLROPoller[_models.StorageAccount]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + parameters: _models.StorageAccountUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageAccount: + """The update operation can be used to update the SKU, encryption, access tier, or tags for a + storage account. It can also be used to map the account to a custom domain. Only one custom + domain is supported per storage account; the replacement/change of custom domain is not + supported. In order to replace an old custom domain, the old value must be cleared/unregistered + before a new value can be set. The update of multiple properties is supported. This call does + not change the storage keys for the account. If you want to change the storage account keys, + use the regenerate keys operation. The location and name of the storage account cannot be + changed after creation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the updated account. Required. + :type parameters: ~azure.mgmt.storage.models.StorageAccountUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageAccount. 
The StorageAccount is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccount + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageAccount: + """The update operation can be used to update the SKU, encryption, access tier, or tags for a + storage account. It can also be used to map the account to a custom domain. Only one custom + domain is supported per storage account; the replacement/change of custom domain is not + supported. In order to replace an old custom domain, the old value must be cleared/unregistered + before a new value can be set. The update of multiple properties is supported. This call does + not change the storage keys for the account. If you want to change the storage account keys, + use the regenerate keys operation. The location and name of the storage account cannot be + changed after creation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the updated account. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageAccount. 
The StorageAccount is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccount + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageAccount: + """The update operation can be used to update the SKU, encryption, access tier, or tags for a + storage account. It can also be used to map the account to a custom domain. Only one custom + domain is supported per storage account; the replacement/change of custom domain is not + supported. In order to replace an old custom domain, the old value must be cleared/unregistered + before a new value can be set. The update of multiple properties is supported. This call does + not change the storage keys for the account. If you want to change the storage account keys, + use the regenerate keys operation. The location and name of the storage account cannot be + changed after creation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the updated account. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageAccount. 
The StorageAccount is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccount + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.StorageAccountUpdateParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.StorageAccount: + """The update operation can be used to update the SKU, encryption, access tier, or tags for a + storage account. It can also be used to map the account to a custom domain. Only one custom + domain is supported per storage account; the replacement/change of custom domain is not + supported. In order to replace an old custom domain, the old value must be cleared/unregistered + before a new value can be set. The update of multiple properties is supported. This call does + not change the storage keys for the account. If you want to change the storage account keys, + use the regenerate keys operation. The location and name of the storage account cannot be + changed after creation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the updated account. Is one of the following + types: StorageAccountUpdateParameters, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.StorageAccountUpdateParameters or JSON or + IO[bytes] + :return: StorageAccount. 
The StorageAccount is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccount + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAccount] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
_failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAccount, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete(self, resource_group_name: str, account_name: str, **kwargs: Any) -> None: + """Deletes a storage account in Microsoft Azure. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_storage_accounts_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.StorageAccount"]: + """Lists all the storage accounts available under the given resource group. Note that storage keys + are not returned; use the ListKeys operation for this. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :return: An iterator like instance of StorageAccount + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageAccount]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_accounts_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + 
_request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageAccount], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.StorageAccount"]: + """Lists all the storage accounts available under the subscription. 
Note that storage keys are not + returned; use the ListKeys operation for this. + + :return: An iterator like instance of StorageAccount + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageAccount]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_accounts_list_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
:keyword expand: Specifies type of the key to be listed. Possible value is kerb. Known value +    is "kerb". Default value is "kerb".
The StorageAccountListKeysResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageAccountListKeysResult] = kwargs.pop("cls", None) + + _request = build_storage_accounts_list_keys_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAccountListKeysResult, response.json()) + + if cls: 
+ return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def regenerate_key( + self, + resource_group_name: str, + account_name: str, + regenerate_key: _models.StorageAccountRegenerateKeyParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageAccountListKeysResult: + """Regenerates one of the access keys or Kerberos keys for the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, + kerb1, kerb2. Required. + :type regenerate_key: ~azure.mgmt.storage.models.StorageAccountRegenerateKeyParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageAccountListKeysResult. The StorageAccountListKeysResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def regenerate_key( + self, + resource_group_name: str, + account_name: str, + regenerate_key: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageAccountListKeysResult: + """Regenerates one of the access keys or Kerberos keys for the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, + kerb1, kerb2. Required. + :type regenerate_key: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageAccountListKeysResult. The StorageAccountListKeysResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def regenerate_key( + self, + resource_group_name: str, + account_name: str, + regenerate_key: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageAccountListKeysResult: + """Regenerates one of the access keys or Kerberos keys for the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, + kerb1, kerb2. Required. + :type regenerate_key: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageAccountListKeysResult. 
The StorageAccountListKeysResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def regenerate_key( + self, + resource_group_name: str, + account_name: str, + regenerate_key: Union[_models.StorageAccountRegenerateKeyParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.StorageAccountListKeysResult: + """Regenerates one of the access keys or Kerberos keys for the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, + kerb1, kerb2. Is one of the following types: StorageAccountRegenerateKeyParameters, JSON, + IO[bytes] Required. + :type regenerate_key: ~azure.mgmt.storage.models.StorageAccountRegenerateKeyParameters or JSON + or IO[bytes] + :return: StorageAccountListKeysResult. 
The StorageAccountListKeysResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAccountListKeysResult] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(regenerate_key, (IOBase, bytes)): + _content = regenerate_key + else: + _content = json.dumps(regenerate_key, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_regenerate_key_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAccountListKeysResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def list_account_sas( + self, + resource_group_name: str, + account_name: str, + parameters: _models.AccountSasParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ListAccountSasResponse: + """List SAS credentials of a storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list SAS credentials for the storage account. + Required. + :type parameters: ~azure.mgmt.storage.models.AccountSasParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ListAccountSasResponse. 
The ListAccountSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def list_account_sas( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ListAccountSasResponse: + """List SAS credentials of a storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list SAS credentials for the storage account. + Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ListAccountSasResponse. The ListAccountSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def list_account_sas( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ListAccountSasResponse: + """List SAS credentials of a storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. 
+ :type account_name: str + :param parameters: The parameters to provide to list SAS credentials for the storage account. + Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: ListAccountSasResponse. The ListAccountSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def list_account_sas( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.AccountSasParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.ListAccountSasResponse: + """List SAS credentials of a storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list SAS credentials for the storage account. + Is one of the following types: AccountSasParameters, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.AccountSasParameters or JSON or IO[bytes] + :return: ListAccountSasResponse. 
The ListAccountSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ListAccountSasResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_list_account_sas_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ListAccountSasResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def list_service_sas( + self, + resource_group_name: str, + account_name: str, + parameters: _models.ServiceSasParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ListServiceSasResponse: + """List service SAS credentials of a specific resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list service SAS credentials. Required. + :type parameters: ~azure.mgmt.storage.models.ServiceSasParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ListServiceSasResponse. The ListServiceSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def list_service_sas( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ListServiceSasResponse: + """List service SAS credentials of a specific resource. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list service SAS credentials. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ListServiceSasResponse. The ListServiceSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def list_service_sas( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ListServiceSasResponse: + """List service SAS credentials of a specific resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list service SAS credentials. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: ListServiceSasResponse. 
The ListServiceSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def list_service_sas( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.ServiceSasParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.ListServiceSasResponse: + """List service SAS credentials of a specific resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list service SAS credentials. Is one of the + following types: ServiceSasParameters, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.ServiceSasParameters or JSON or IO[bytes] + :return: ListServiceSasResponse. 
The ListServiceSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ListServiceSasResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_list_service_sas_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ListServiceSasResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _failover_initial( + self, + resource_group_name: str, + account_name: str, + *, + failover_type: Literal["Planned"] = "Planned", + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_accounts_failover_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + failover_type=failover_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_failover( + self, + resource_group_name: str, + account_name: str, + *, + failover_type: Literal["Planned"] = "Planned", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """A failover request can be triggered for a storage account in the event a primary endpoint + becomes unavailable for any reason. The failover occurs from the storage account's primary + cluster to the secondary cluster for RA-GRS accounts. The secondary cluster will become primary + after failover and the account is converted to LRS. In the case of a Planned Failover, the + primary and secondary clusters are swapped after failover and the account remains + geo-replicated. Failover should continue to be used in the event of availability issues as + Planned failover is only available while the primary and secondary endpoints are available. The + primary use case of a Planned Failover is disaster recovery testing drills. This type of + failover is invoked by setting FailoverType parameter to 'Planned'. Learn more about the + failover options here- + `https://learn.microsoft.com/azure/storage/common/storage-disaster-recovery-guidance + <https://learn.microsoft.com/azure/storage/common/storage-disaster-recovery-guidance>`_. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required.
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword failover_type: The parameter is set to 'Planned' to indicate whether a Planned + failover is requested. Known values are "Planned" and None. Default value is "Planned". + :paramtype failover_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._failover_initial( + resource_group_name=resource_group_name, + account_name=account_name, + failover_type=failover_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return 
AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + async def _hierarchical_namespace_migration_initial( # pylint: disable=name-too-long + self, resource_group_name: str, account_name: str, *, request_type: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_accounts_hierarchical_namespace_migration_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + request_type=request_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, 
+ response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_hierarchical_namespace_migration( + self, resource_group_name: str, account_name: str, *, request_type: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Live Migration of storage account to enable Hns. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword request_type: Hierarchical namespace migration type can either be a + hierarchical namespace validation request 'HnsOnValidationRequest' or a hydration request + 'HnsOnHydrationRequest'. The validation request will validate the migration whereas the + hydration request will migrate the account. Required.
+ :paramtype request_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._hierarchical_namespace_migration_initial( + resource_group_name=resource_group_name, + account_name=account_name, + request_type=request_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + async def _abort_hierarchical_namespace_migration_initial( # pylint: disable=name-too-long + self, resource_group_name: str, account_name: str, 
**kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_accounts_abort_hierarchical_namespace_migration_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, 
deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_abort_hierarchical_namespace_migration( # pylint: disable=name-too-long + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Abort live Migration of storage account to enable Hns. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._abort_hierarchical_namespace_migration_initial( + resource_group_name=resource_group_name, + account_name=account_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + 
AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + async def _customer_initiated_migration_initial( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.StorageAccountMigration, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_customer_initiated_migration_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_customer_initiated_migration( + self, + resource_group_name: str, + account_name: str, + parameters: _models.StorageAccountMigration, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Account Migration request can be triggered for a storage account to change its redundancy + level. The migration updates the non-zonal redundant storage account to a zonal redundant + account or vice-versa in order to have better reliability and availability. Zone-redundant + storage (ZRS) replicates your storage account synchronously across three Azure availability + zones in the primary region. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The request parameters required to perform storage account migration. + Required. + :type parameters: ~azure.mgmt.storage.models.StorageAccountMigration + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_customer_initiated_migration( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Account Migration request can be triggered for a storage account to change its redundancy + level. The migration updates the non-zonal redundant storage account to a zonal redundant + account or vice-versa in order to have better reliability and availability. Zone-redundant + storage (ZRS) replicates your storage account synchronously across three Azure availability + zones in the primary region. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The request parameters required to perform storage account migration. + Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_customer_initiated_migration( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Account Migration request can be triggered for a storage account to change its redundancy + level. The migration updates the non-zonal redundant storage account to a zonal redundant + account or vice-versa in order to have better reliability and availability. Zone-redundant + storage (ZRS) replicates your storage account synchronously across three Azure availability + zones in the primary region. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The request parameters required to perform storage account migration. + Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_customer_initiated_migration( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.StorageAccountMigration, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Account Migration request can be triggered for a storage account to change its redundancy + level. The migration updates the non-zonal redundant storage account to a zonal redundant + account or vice-versa in order to have better reliability and availability. Zone-redundant + storage (ZRS) replicates your storage account synchronously across three Azure availability + zones in the primary region. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The request parameters required to perform storage account migration. Is one + of the following types: StorageAccountMigration, JSON, IO[bytes] Required. 
+ :type parameters: ~azure.mgmt.storage.models.StorageAccountMigration or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._customer_initiated_migration_initial( + resource_group_name=resource_group_name, + account_name=account_name, + parameters=parameters, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, 
get_long_running_output, polling_method) # type: ignore + + async def _restore_blob_ranges_initial( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.BlobRestoreParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_restore_blob_ranges_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_restore_blob_ranges( + self, + resource_group_name: str, + account_name: str, + parameters: _models.BlobRestoreParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BlobRestoreStatus]: + """Restore blobs in the specified blob ranges. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for restore blob ranges. Required. + :type parameters: ~azure.mgmt.storage.models.BlobRestoreParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns BlobRestoreStatus. 
The BlobRestoreStatus is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_restore_blob_ranges( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BlobRestoreStatus]: + """Restore blobs in the specified blob ranges. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for restore blob ranges. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns BlobRestoreStatus. The BlobRestoreStatus is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_restore_blob_ranges( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BlobRestoreStatus]: + """Restore blobs in the specified blob ranges. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for restore blob ranges. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns BlobRestoreStatus. The BlobRestoreStatus is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_restore_blob_ranges( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.BlobRestoreParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.BlobRestoreStatus]: + """Restore blobs in the specified blob ranges. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for restore blob ranges. Is one of the following + types: BlobRestoreParameters, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.BlobRestoreParameters or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns BlobRestoreStatus. 
The BlobRestoreStatus is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BlobRestoreStatus] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._restore_blob_ranges_initial( + resource_group_name=resource_group_name, + account_name=account_name, + parameters=parameters, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.BlobRestoreStatus, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.BlobRestoreStatus].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.BlobRestoreStatus]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @distributed_trace_async + async def revoke_user_delegation_keys(self, resource_group_name: str, account_name: str, **kwargs: Any) -> None: + """Revoke user delegation keys. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_storage_accounts_revoke_user_delegation_keys_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def get_customer_initiated_migration( + self, + resource_group_name: str, + account_name: str, + migration_name: Union[str, _models.MigrationName], + **kwargs: Any + ) -> _models.StorageAccountMigration: + """Gets the status of the ongoing migration for the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param migration_name: The name of the Storage Account Migration. It should always be + 'default'. "default" Required. + :type migration_name: str or ~azure.mgmt.storage.models.MigrationName + :return: StorageAccountMigration. 
The StorageAccountMigration is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccountMigration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageAccountMigration] = kwargs.pop("cls", None) + + _request = build_storage_accounts_get_customer_initiated_migration_request( + resource_group_name=resource_group_name, + account_name=account_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAccountMigration, 
response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class FileSharesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`file_shares` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + account_name: str, + share_name: str, + *, + expand: Optional[str] = None, + x_ms_snapshot: Optional[str] = None, + **kwargs: Any + ) -> _models.FileShare: + """Gets properties of a specified share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :keyword expand: Optional, used to expand the properties within share's properties. Valid + values are: stats. 
Should be passed as a string with delimiter ','. Default value is None. + :paramtype expand: str + :keyword x_ms_snapshot: Optional, used to retrieve properties of a snapshot. Default value is + None. + :paramtype x_ms_snapshot: str + :return: FileShare. The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) + + _request = build_file_shares_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + share_name=share_name, + subscription_id=self._config.subscription_id, + expand=expand, + x_ms_snapshot=x_ms_snapshot, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShare, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def create( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: _models.FileShare, + *, + expand: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileShare: + """Creates a new share under the specified account as described by request body. The share + resource includes metadata and properties for that share. It does not include a list of the + files contained by the share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param file_share: Properties of the file share to create. Required. + :type file_share: ~azure.mgmt.storage.models.FileShare + :keyword expand: Optional, used to expand the properties within share's properties. Valid + values are: snapshots. Should be passed as a string with delimiter ','. Default value is None. + :paramtype expand: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
+ Default value is "application/json". + :paramtype content_type: str + :return: FileShare. The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: JSON, + *, + expand: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileShare: + """Creates a new share under the specified account as described by request body. The share + resource includes metadata and properties for that share. It does not include a list of the + files contained by the share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param file_share: Properties of the file share to create. Required. + :type file_share: JSON + :keyword expand: Optional, used to expand the properties within share's properties. Valid + values are: snapshots. Should be passed as a string with delimiter ','. Default value is None. + :paramtype expand: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShare. 
The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: IO[bytes], + *, + expand: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileShare: + """Creates a new share under the specified account as described by request body. The share + resource includes metadata and properties for that share. It does not include a list of the + files contained by the share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param file_share: Properties of the file share to create. Required. + :type file_share: IO[bytes] + :keyword expand: Optional, used to expand the properties within share's properties. Valid + values are: snapshots. Should be passed as a string with delimiter ','. Default value is None. + :paramtype expand: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShare. 
The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: Union[_models.FileShare, JSON, IO[bytes]], + *, + expand: Optional[str] = None, + **kwargs: Any + ) -> _models.FileShare: + """Creates a new share under the specified account as described by request body. The share + resource includes metadata and properties for that share. It does not include a list of the + files contained by the share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param file_share: Properties of the file share to create. Is one of the following types: + FileShare, JSON, IO[bytes] Required. + :type file_share: ~azure.mgmt.storage.models.FileShare or JSON or IO[bytes] + :keyword expand: Optional, used to expand the properties within share's properties. Valid + values are: snapshots. Should be passed as a string with delimiter ','. Default value is None. + :paramtype expand: str + :return: FileShare. 
The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(file_share, (IOBase, bytes)): + _content = file_share + else: + _content = json.dumps(file_share, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_shares_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + share_name=share_name, + subscription_id=self._config.subscription_id, + expand=expand, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShare, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: _models.FileShare, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileShare: + """Updates share properties as specified in request body. Properties not mentioned in the request + will not be changed. Update fails if the specified share does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param file_share: Properties to update for the file share. Required. + :type file_share: ~azure.mgmt.storage.models.FileShare + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShare. 
The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileShare: + """Updates share properties as specified in request body. Properties not mentioned in the request + will not be changed. Update fails if the specified share does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param file_share: Properties to update for the file share. Required. + :type file_share: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShare. The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileShare: + """Updates share properties as specified in request body. 
Properties not mentioned in the request + will not be changed. Update fails if the specified share does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param file_share: Properties to update for the file share. Required. + :type file_share: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShare. The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: Union[_models.FileShare, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.FileShare: + """Updates share properties as specified in request body. Properties not mentioned in the request + will not be changed. Update fails if the specified share does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param file_share: Properties to update for the file share. Is one of the following types: + FileShare, JSON, IO[bytes] Required. + :type file_share: ~azure.mgmt.storage.models.FileShare or JSON or IO[bytes] + :return: FileShare. The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(file_share, (IOBase, bytes)): + _content = file_share + else: + _content = json.dumps(file_share, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_shares_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + share_name=share_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": 
self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShare, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete( + self, + resource_group_name: str, + account_name: str, + share_name: str, + *, + x_ms_snapshot: Optional[str] = None, + include: Optional[str] = None, + **kwargs: Any + ) -> None: + """Deletes specified share under its account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. 
File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :keyword x_ms_snapshot: Optional, used to delete a snapshot. Default value is None. + :paramtype x_ms_snapshot: str + :keyword include: Optional. Valid values are: snapshots, leased-snapshots, none. The default + value is snapshots. For 'snapshots', the file share is deleted including all of its file share + snapshots. If the file share contains leased-snapshots, the deletion fails. For + 'leased-snapshots', the file share is deleted included all of its file share snapshots + (leased/unleased). For 'none', the file share is deleted if it has no share snapshots. If the + file share contains any snapshots (leased or unleased), the deletion fails. Default value is + None. + :paramtype include: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_file_shares_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + share_name=share_name, + subscription_id=self._config.subscription_id, + x_ms_snapshot=x_ms_snapshot, + include=include, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse 
= await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @overload + async def restore( + self, + resource_group_name: str, + account_name: str, + share_name: str, + deleted_share: _models.DeletedShare, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Restore a file share within a valid retention days if share soft delete is enabled. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param deleted_share: Required. + :type deleted_share: ~azure.mgmt.storage.models.DeletedShare + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def restore( + self, + resource_group_name: str, + account_name: str, + share_name: str, + deleted_share: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Restore a file share within a valid retention days if share soft delete is enabled. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param deleted_share: Required. + :type deleted_share: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def restore( + self, + resource_group_name: str, + account_name: str, + share_name: str, + deleted_share: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Restore a file share within a valid retention days if share soft delete is enabled. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param deleted_share: Required. + :type deleted_share: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def restore( + self, + resource_group_name: str, + account_name: str, + share_name: str, + deleted_share: Union[_models.DeletedShare, JSON, IO[bytes]], + **kwargs: Any + ) -> None: + """Restore a file share within a valid retention days if share soft delete is enabled. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param deleted_share: Is one of the following types: DeletedShare, JSON, IO[bytes] Required. 
+ :type deleted_share: ~azure.mgmt.storage.models.DeletedShare or JSON or IO[bytes] + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(deleted_share, (IOBase, bytes)): + _content = deleted_share + else: + _content = json.dumps(deleted_share, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_shares_restore_request( + resource_group_name=resource_group_name, + account_name=account_name, + share_name=share_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) 
# type: ignore + + @overload + async def lease( + self, + resource_group_name: str, + account_name: str, + share_name: str, + parameters: Optional[_models.LeaseShareRequest] = None, + *, + x_ms_snapshot: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LeaseShareResponse: + """The Lease Share operation establishes and manages a lock on a share for delete operations. The + lock duration can be 15 to 60 seconds, or can be infinite. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param parameters: The content of the action request. Default value is None. + :type parameters: ~azure.mgmt.storage.models.LeaseShareRequest + :keyword x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default value + is None. + :paramtype x_ms_snapshot: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: LeaseShareResponse. 
The LeaseShareResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LeaseShareResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def lease( + self, + resource_group_name: str, + account_name: str, + share_name: str, + parameters: Optional[JSON] = None, + *, + x_ms_snapshot: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LeaseShareResponse: + """The Lease Share operation establishes and manages a lock on a share for delete operations. The + lock duration can be 15 to 60 seconds, or can be infinite. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param parameters: The content of the action request. Default value is None. + :type parameters: JSON + :keyword x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default value + is None. + :paramtype x_ms_snapshot: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: LeaseShareResponse. 
The LeaseShareResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LeaseShareResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def lease( + self, + resource_group_name: str, + account_name: str, + share_name: str, + parameters: Optional[IO[bytes]] = None, + *, + x_ms_snapshot: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LeaseShareResponse: + """The Lease Share operation establishes and manages a lock on a share for delete operations. The + lock duration can be 15 to 60 seconds, or can be infinite. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param parameters: The content of the action request. Default value is None. + :type parameters: IO[bytes] + :keyword x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default value + is None. + :paramtype x_ms_snapshot: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: LeaseShareResponse. 
The LeaseShareResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LeaseShareResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def lease( + self, + resource_group_name: str, + account_name: str, + share_name: str, + parameters: Optional[Union[_models.LeaseShareRequest, JSON, IO[bytes]]] = None, + *, + x_ms_snapshot: Optional[str] = None, + **kwargs: Any + ) -> _models.LeaseShareResponse: + """The Lease Share operation establishes and manages a lock on a share for delete operations. The + lock duration can be 15 to 60 seconds, or can be infinite. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param parameters: The content of the action request. Is one of the following types: + LeaseShareRequest, JSON, IO[bytes] Default value is None. + :type parameters: ~azure.mgmt.storage.models.LeaseShareRequest or JSON or IO[bytes] + :keyword x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default value + is None. + :paramtype x_ms_snapshot: str + :return: LeaseShareResponse. 
The LeaseShareResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LeaseShareResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if parameters else None + cls: ClsType[_models.LeaseShareResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" if parameters else None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_file_shares_lease_request( + resource_group_name=resource_group_name, + account_name=account_name, + share_name=share_name, + subscription_id=self._config.subscription_id, + x_ms_snapshot=x_ms_snapshot, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the 
body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LeaseShareResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + account_name: str, + *, + filter: Optional[str] = None, + expand: Optional[str] = None, + **kwargs: Any + ) -> AsyncItemPaged["_models.FileShareItem"]: + """Lists all shares. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword filter: Optional. When specified, only share names starting with the filter will be + listed. Default value is None. + :paramtype filter: str + :keyword expand: Optional, used to expand the properties within share's properties. Valid + values are: deleted, snapshots. Should be passed as a string with delimiter ','. Default value + is None. 
+ :paramtype expand: str + :return: An iterator like instance of FileShareItem + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.FileShareItem] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.FileShareItem]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_file_shares_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + filter=filter, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async 
def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.FileShareItem], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class FileServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`file_services` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get_service_properties( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> _models.FileServiceProperties: + """Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: FileServiceProperties. 
The FileServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileServiceProperties] = kwargs.pop("cls", None) + + _request = build_file_services_get_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, 
deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: _models.FileServiceProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileServiceProperties: + """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of file services in storage accounts, including CORS + (Cross-Origin Resource Sharing) rules. Required. + :type parameters: ~azure.mgmt.storage.models.FileServiceProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileServiceProperties. The FileServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileServiceProperties: + """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of file services in storage accounts, including CORS + (Cross-Origin Resource Sharing) rules. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileServiceProperties. The FileServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileServiceProperties: + """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of file services in storage accounts, including CORS + (Cross-Origin Resource Sharing) rules. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: FileServiceProperties. 
The FileServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.FileServiceProperties, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.FileServiceProperties: + """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of file services in storage accounts, including CORS + (Cross-Origin Resource Sharing) rules. Is one of the following types: FileServiceProperties, + JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.FileServiceProperties or JSON or IO[bytes] + :return: FileServiceProperties. 
The FileServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileServiceProperties] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_services_set_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.FileServiceItems: + """List all file services in storage accounts. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: FileServiceItems. 
The FileServiceItems is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceItems + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileServiceItems] = kwargs.pop("cls", None) + + _request = build_file_services_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileServiceItems, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return 
deserialized # type: ignore + + @distributed_trace_async + async def get_service_usage( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> _models.FileServiceUsage: + """Gets the usage of file service in storage account including account limits, file share limits + and constants used in recommendations and bursting formula. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: FileServiceUsage. The FileServiceUsage is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceUsage + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileServiceUsage] = kwargs.pop("cls", None) + + _request = build_file_services_get_service_usage_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, 
stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileServiceUsage, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_service_usages( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.FileServiceUsage"]: + """Gets the usages of file service in storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. 
+ :type account_name: str + :return: An iterator like instance of FileServiceUsage + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.FileServiceUsage] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.FileServiceUsage]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_file_services_list_service_usages_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def 
extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.FileServiceUsage], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class QueueServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`queue_services` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get_service_properties( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> _models.QueueServiceProperties: + """Gets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: QueueServiceProperties. 
The QueueServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.QueueServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QueueServiceProperties] = kwargs.pop("cls", None) + + _request = build_queue_services_get_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.QueueServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, 
deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: _models.QueueServiceProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueueServiceProperties: + """Sets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Queue service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. + :type parameters: ~azure.mgmt.storage.models.QueueServiceProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QueueServiceProperties. The QueueServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.QueueServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueueServiceProperties: + """Sets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Queue service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QueueServiceProperties. The QueueServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.QueueServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueueServiceProperties: + """Sets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Queue service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: QueueServiceProperties. The QueueServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.QueueServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.QueueServiceProperties, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.QueueServiceProperties: + """Sets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Queue service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Is one of + the following types: QueueServiceProperties, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.QueueServiceProperties or JSON or IO[bytes] + :return: QueueServiceProperties. 
The QueueServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.QueueServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.QueueServiceProperties] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_queue_services_set_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
    @distributed_trace_async
    async def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.ListQueueServices:
        """List all queue services for the storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :return: ListQueueServices. The ListQueueServices is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.ListQueueServices
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # ARM default status-code -> exception mapping; callers may extend or
        # override entries via the ``error_map`` keyword argument.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.ListQueueServices] = kwargs.pop("cls", None)

        _request = build_queue_services_list_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.CloudError,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # With ``stream=True`` the caller gets a raw byte iterator; otherwise the
        # JSON payload is deserialized into the model type.
        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.ListQueueServices, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore
class DeletedAccountsOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s
        :attr:`deleted_accounts` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # Positional wiring used by the generated client:
        # (client, config, serializer, deserializer).
        input_args = list(args)
        self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace_async
    async def get(self, deleted_account_name: str, location: str, **kwargs: Any) -> _models.DeletedAccount:
        """Get properties of specified deleted account resource.

        :param deleted_account_name: Name of the deleted storage account. Required.
        :type deleted_account_name: str
        :param location: The name of the Azure region. Required.
        :type location: str
        :return: DeletedAccount. The DeletedAccount is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.DeletedAccount
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # ARM default status-code -> exception mapping; extendable via ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.DeletedAccount] = kwargs.pop("cls", None)

        _request = build_deleted_accounts_get_request(
            deleted_account_name=deleted_account_name,
            location=location,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # Streaming callers get raw bytes; non-streaming callers get the model.
        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.DeletedAccount, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore

    @distributed_trace
    def list(self, **kwargs: Any) -> AsyncItemPaged["_models.DeletedAccount"]:
        """Lists deleted accounts under the subscription.

        :return: An iterator like instance of DeletedAccount
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.DeletedAccount]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_models.DeletedAccount]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the request builder; subsequent pages replay the
            # service-provided nextLink with the client's api-version re-applied.
            if not next_link:

                _request = build_deleted_accounts_list_request(
                    subscription_id=self._config.subscription_id,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        async def extract_data(pipeline_response):
            # Pull the page's "value" array and the continuation "nextLink".
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(
                List[_models.DeletedAccount],
                deserialized.get("value", []),
            )
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("nextLink") or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(
                    _models.ErrorResponse,
                    response,
                )
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + account_name: str, + management_policy_name: Union[str, _models.ManagementPolicyName], + **kwargs: Any + ) -> _models.ManagementPolicy: + """Gets the managementpolicy associated with the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. "default" Required. + :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName + :return: ManagementPolicy. 
The ManagementPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ManagementPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ManagementPolicy] = kwargs.pop("cls", None) + + _request = build_management_policies_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + management_policy_name=management_policy_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ManagementPolicy, response.json()) + + if cls: + return 
    @overload
    async def create_or_update(
        self,
        resource_group_name: str,
        account_name: str,
        management_policy_name: Union[str, _models.ManagementPolicyName],
        properties: _models.ManagementPolicy,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.ManagementPolicy:
        """Sets the managementpolicy to the specified storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param management_policy_name: The name of the Storage Account Management Policy. It should
         always be 'default'. "default" Required.
        :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName
        :param properties: The ManagementPolicy set to a storage account. Required.
        :type properties: ~azure.mgmt.storage.models.ManagementPolicy
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: ManagementPolicy. The ManagementPolicy is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.ManagementPolicy
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def create_or_update(
        self,
        resource_group_name: str,
        account_name: str,
        management_policy_name: Union[str, _models.ManagementPolicyName],
        properties: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.ManagementPolicy:
        """Sets the managementpolicy to the specified storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param management_policy_name: The name of the Storage Account Management Policy. It should
         always be 'default'. "default" Required.
        :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName
        :param properties: The ManagementPolicy set to a storage account. Required.
        :type properties: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: ManagementPolicy. The ManagementPolicy is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.ManagementPolicy
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def create_or_update(
        self,
        resource_group_name: str,
        account_name: str,
        management_policy_name: Union[str, _models.ManagementPolicyName],
        properties: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.ManagementPolicy:
        """Sets the managementpolicy to the specified storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param management_policy_name: The name of the Storage Account Management Policy. It should
         always be 'default'. "default" Required.
        :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName
        :param properties: The ManagementPolicy set to a storage account. Required.
        :type properties: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: ManagementPolicy. The ManagementPolicy is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.ManagementPolicy
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def create_or_update(
        self,
        resource_group_name: str,
        account_name: str,
        management_policy_name: Union[str, _models.ManagementPolicyName],
        properties: Union[_models.ManagementPolicy, JSON, IO[bytes]],
        **kwargs: Any
    ) -> _models.ManagementPolicy:
        """Sets the managementpolicy to the specified storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param management_policy_name: The name of the Storage Account Management Policy. It should
         always be 'default'. "default" Required.
        :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName
        :param properties: The ManagementPolicy set to a storage account. Is one of the following
         types: ManagementPolicy, JSON, IO[bytes] Required.
        :type properties: ~azure.mgmt.storage.models.ManagementPolicy or JSON or IO[bytes]
        :return: ManagementPolicy. The ManagementPolicy is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.ManagementPolicy
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # ARM default status-code -> exception mapping; extendable via ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.ManagementPolicy] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        if isinstance(properties, (IOBase, bytes)):
            # Stream/bytes bodies are passed through untouched; models and dicts
            # are JSON-encoded with read-only fields stripped.
            _content = properties
        else:
            _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_management_policies_create_or_update_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            management_policy_name=management_policy_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.ManagementPolicy, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace_async
    async def delete(
        self,
        resource_group_name: str,
        account_name: str,
        management_policy_name: Union[str, _models.ManagementPolicyName],
        **kwargs: Any
    ) -> None:
        """Deletes the managementpolicy associated with the specified storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param management_policy_name: The name of the Storage Account Management Policy. It should
         always be 'default'. "default" Required.
        :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # ARM default status-code -> exception mapping; extendable via ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_management_policies_delete_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            management_policy_name=management_policy_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # 200 (deleted) and 204 (nothing to delete) are both success for DELETE.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})  # type: ignore
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + account_name: str, + blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], + **kwargs: Any + ) -> _models.BlobInventoryPolicy: + """Gets the blob inventory policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. "default" Required. + :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName + :return: BlobInventoryPolicy. 
The BlobInventoryPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BlobInventoryPolicy] = kwargs.pop("cls", None) + + _request = build_blob_inventory_policies_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + blob_inventory_policy_name=blob_inventory_policy_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobInventoryPolicy, response.json()) + + if cls: 
    @overload
    async def create_or_update(
        self,
        resource_group_name: str,
        account_name: str,
        blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName],
        properties: _models.BlobInventoryPolicy,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.BlobInventoryPolicy:
        """Sets the blob inventory policy to the specified storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It
         should always be 'default'. "default" Required.
        :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName
        :param properties: The blob inventory policy set to a storage account. Required.
        :type properties: ~azure.mgmt.storage.models.BlobInventoryPolicy
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: BlobInventoryPolicy. The BlobInventoryPolicy is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def create_or_update(
        self,
        resource_group_name: str,
        account_name: str,
        blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName],
        properties: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.BlobInventoryPolicy:
        """Sets the blob inventory policy to the specified storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It
         should always be 'default'. "default" Required.
        :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName
        :param properties: The blob inventory policy set to a storage account. Required.
        :type properties: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: BlobInventoryPolicy. The BlobInventoryPolicy is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def create_or_update(
        self,
        resource_group_name: str,
        account_name: str,
        blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName],
        properties: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.BlobInventoryPolicy:
        """Sets the blob inventory policy to the specified storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It
         should always be 'default'. "default" Required.
        :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName
        :param properties: The blob inventory policy set to a storage account. Required.
        :type properties: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: BlobInventoryPolicy. The BlobInventoryPolicy is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def create_or_update(
        self,
        resource_group_name: str,
        account_name: str,
        blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName],
        properties: Union[_models.BlobInventoryPolicy, JSON, IO[bytes]],
        **kwargs: Any
    ) -> _models.BlobInventoryPolicy:
        """Sets the blob inventory policy to the specified storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It
         should always be 'default'. "default" Required.
        :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName
        :param properties: The blob inventory policy set to a storage account. Is one of the
         following types: BlobInventoryPolicy, JSON, IO[bytes] Required.
        :type properties: ~azure.mgmt.storage.models.BlobInventoryPolicy or JSON or IO[bytes]
        :return: BlobInventoryPolicy. The BlobInventoryPolicy is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # ARM default status-code -> exception mapping; extendable via ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.BlobInventoryPolicy] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        if isinstance(properties, (IOBase, bytes)):
            # Stream/bytes bodies are passed through untouched; models and dicts
            # are JSON-encoded with read-only fields stripped.
            _content = properties
        else:
            _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_blob_inventory_policies_create_or_update_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            blob_inventory_policy_name=blob_inventory_policy_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.CloudError,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.BlobInventoryPolicy, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore
+ :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_inventory_policies_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + blob_inventory_policy_name=blob_inventory_policy_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.BlobInventoryPolicy"]: + """Gets the blob inventory policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An iterator like instance of BlobInventoryPolicy + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.BlobInventoryPolicy] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BlobInventoryPolicy]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_blob_inventory_policies_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + 
"self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.BlobInventoryPolicy], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class PrivateEndpointConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`private_endpoint_connections` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Gets the specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :return: PrivateEndpointConnection. 
The PrivateEndpointConnection is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) + + _request = build_private_endpoint_connections_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = 
_deserialize(_models.PrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def put( + self, + resource_group_name: str, + account_name: str, + private_endpoint_connection_name: str, + properties: _models.PrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Update the state of specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. Required. + :type properties: ~azure.mgmt.storage.models.PrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: PrivateEndpointConnection. 
The PrivateEndpointConnection is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def put( + self, + resource_group_name: str, + account_name: str, + private_endpoint_connection_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Update the state of specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: PrivateEndpointConnection. The PrivateEndpointConnection is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def put( + self, + resource_group_name: str, + account_name: str, + private_endpoint_connection_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Update the state of specified private endpoint connection associated with the storage account. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: PrivateEndpointConnection. The PrivateEndpointConnection is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def put( + self, + resource_group_name: str, + account_name: str, + private_endpoint_connection_name: str, + properties: Union[_models.PrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Update the state of specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. Is one of the following types: + PrivateEndpointConnection, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.storage.models.PrivateEndpointConnection or JSON or IO[bytes] + :return: PrivateEndpointConnection. The PrivateEndpointConnection is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_private_endpoint_connections_put_request( + resource_group_name=resource_group_name, + account_name=account_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.PrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete( + self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> None: + """Deletes the specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_private_endpoint_connections_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.PrivateEndpointConnection"]: + """List all the private endpoint connections associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An iterator like instance of PrivateEndpointConnection + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.PrivateEndpointConnection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_private_endpoint_connections_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": 
self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.PrivateEndpointConnection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class EncryptionScopesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`encryption_scopes` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, account_name: str, encryption_scope_name: str, **kwargs: Any + ) -> _models.EncryptionScope: + """Returns the properties for the specified encryption scope. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.EncryptionScope] = kwargs.pop("cls", None) + + _request = build_encryption_scopes_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + encryption_scope_name=encryption_scope_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.EncryptionScope, response.json()) + + if cls: + return 
cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def put( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: _models.EncryptionScope, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EncryptionScope: + """Synchronously creates or updates an encryption scope under the specified storage account. If an + encryption scope is already created and a subsequent request is issued with different + properties, the encryption scope properties will be updated per the specified request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the create or update. + Required. + :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def put( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EncryptionScope: + """Synchronously creates or updates an encryption scope under the specified storage account. If an + encryption scope is already created and a subsequent request is issued with different + properties, the encryption scope properties will be updated per the specified request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the create or update. + Required. + :type encryption_scope: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def put( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EncryptionScope: + """Synchronously creates or updates an encryption scope under the specified storage account. If an + encryption scope is already created and a subsequent request is issued with different + properties, the encryption scope properties will be updated per the specified request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the create or update. + Required. + :type encryption_scope: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def put( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: Union[_models.EncryptionScope, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.EncryptionScope: + """Synchronously creates or updates an encryption scope under the specified storage account. If an + encryption scope is already created and a subsequent request is issued with different + properties, the encryption scope properties will be updated per the specified request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the create or update. Is + one of the following types: EncryptionScope, JSON, IO[bytes] Required. + :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope or JSON or IO[bytes] + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EncryptionScope] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(encryption_scope, (IOBase, bytes)): + _content = encryption_scope + else: + _content = json.dumps(encryption_scope, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_encryption_scopes_put_request( + resource_group_name=resource_group_name, + account_name=account_name, + encryption_scope_name=encryption_scope_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.EncryptionScope, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def patch( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: _models.EncryptionScope, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EncryptionScope: + """Update encryption scope properties as specified in the request body. Update fails if the + specified encryption scope does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the update. Required. + :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: EncryptionScope. The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def patch( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EncryptionScope: + """Update encryption scope properties as specified in the request body. Update fails if the + specified encryption scope does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the update. Required. + :type encryption_scope: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def patch( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EncryptionScope: + """Update encryption scope properties as specified in the request body. Update fails if the + specified encryption scope does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the update. Required. + :type encryption_scope: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def patch( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: Union[_models.EncryptionScope, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.EncryptionScope: + """Update encryption scope properties as specified in the request body. Update fails if the + specified encryption scope does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the update. Is one of the + following types: EncryptionScope, JSON, IO[bytes] Required. + :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope or JSON or IO[bytes] + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EncryptionScope] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(encryption_scope, (IOBase, bytes)): + _content = encryption_scope + else: + _content = json.dumps(encryption_scope, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_encryption_scopes_patch_request( + resource_group_name=resource_group_name, + account_name=account_name, + encryption_scope_name=encryption_scope_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.EncryptionScope, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + account_name: str, + *, + filter: Optional[str] = None, + include: Optional[Union[str, _models.ListEncryptionScopesInclude]] = None, + **kwargs: Any + ) -> AsyncItemPaged["_models.EncryptionScope"]: + """Lists all the encryption scopes available under the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword filter: Optional. When specified, only encryption scope names starting with the filter + will be listed. Default value is None. + :paramtype filter: str + :keyword include: Optional, when specified, will list encryption scopes with the specific + state. Defaults to All. Known values are: "All", "Enabled", and "Disabled". Default value is + None. 
+ :paramtype include: str or ~azure.mgmt.storage.models.ListEncryptionScopesInclude + :return: An iterator like instance of EncryptionScope + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.EncryptionScope] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.EncryptionScope]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_encryption_scopes_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + filter=filter, + include=include, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.EncryptionScope], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class TableServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`table_services` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get_service_properties( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> _models.TableServiceProperties: + """Gets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: TableServiceProperties. 
The TableServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.TableServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.TableServiceProperties] = kwargs.pop("cls", None) + + _request = build_table_services_get_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.TableServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, 
deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: _models.TableServiceProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TableServiceProperties: + """Sets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Table service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. + :type parameters: ~azure.mgmt.storage.models.TableServiceProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: TableServiceProperties. The TableServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.TableServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TableServiceProperties: + """Sets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Table service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: TableServiceProperties. The TableServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.TableServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TableServiceProperties: + """Sets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Table service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: TableServiceProperties. The TableServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.TableServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.TableServiceProperties, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.TableServiceProperties: + """Sets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Table service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Is one of + the following types: TableServiceProperties, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.TableServiceProperties or JSON or IO[bytes] + :return: TableServiceProperties. 
The TableServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.TableServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.TableServiceProperties] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_table_services_set_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.TableServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.ListTableServices: + """List all table services for the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: ListTableServices. 
The ListTableServices is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListTableServices + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ListTableServices] = kwargs.pop("cls", None) + + _request = build_table_services_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ListTableServices, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + 
return deserialized # type: ignore + + +class NetworkSecurityPerimeterConfigurationsOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`network_security_perimeter_configurations` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + account_name: str, + network_security_perimeter_configuration_name: str, + **kwargs: Any + ) -> _models.NetworkSecurityPerimeterConfiguration: + """Gets effective NetworkSecurityPerimeterConfiguration for association. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param network_security_perimeter_configuration_name: The name for Network Security Perimeter + configuration. Required. + :type network_security_perimeter_configuration_name: str + :return: NetworkSecurityPerimeterConfiguration. 
The NetworkSecurityPerimeterConfiguration is + compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.NetworkSecurityPerimeterConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.NetworkSecurityPerimeterConfiguration] = kwargs.pop("cls", None) + + _request = build_network_security_perimeter_configurations_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else 
response.iter_raw() + else: + deserialized = _deserialize(_models.NetworkSecurityPerimeterConfiguration, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.NetworkSecurityPerimeterConfiguration"]: + """Gets list of effective NetworkSecurityPerimeterConfiguration for storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An iterator like instance of NetworkSecurityPerimeterConfiguration + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.NetworkSecurityPerimeterConfiguration] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.NetworkSecurityPerimeterConfiguration]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_network_security_perimeter_configurations_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", 
skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.NetworkSecurityPerimeterConfiguration], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + async def _reconcile_initial( + self, + resource_group_name: str, + account_name: str, + 
network_security_perimeter_configuration_name: str, + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_network_security_perimeter_configurations_reconcile_request( + resource_group_name=resource_group_name, + account_name=account_name, + network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = 
response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_reconcile( + self, + resource_group_name: str, + account_name: str, + network_security_perimeter_configuration_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Refreshes any information about the association. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param network_security_perimeter_configuration_name: The name for Network Security Perimeter + configuration. Required. + :type network_security_perimeter_configuration_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._reconcile_initial( + resource_group_name=resource_group_name, + account_name=account_name, + network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def 
get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + +class StorageTaskAssignmentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`storage_task_assignments` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any + ) -> _models.StorageTaskAssignment: + """Get the storage task assignment properties. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :return: StorageTaskAssignment. The StorageTaskAssignment is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageTaskAssignment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageTaskAssignment] = kwargs.pop("cls", None) + + _request = build_storage_task_assignments_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = 
pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageTaskAssignment, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_initial( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: Union[_models.StorageTaskAssignment, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_task_assignments_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + subscription_id=self._config.subscription_id, + 
content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: _models.StorageTaskAssignment, + *, + content_type: str = "application/json", + 
**kwargs: Any + ) -> AsyncLROPoller[_models.StorageTaskAssignment]: + """Asynchronously creates a new storage task assignment sub-resource with the specified + parameters. If a storage task assignment is already created and a subsequent create request is + issued with different properties, the storage task assignment properties will be updated. If a + storage task assignment is already created and a subsequent create or update request is issued + with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to create a Storage Task Assignment. Required. + :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageTaskAssignment. 
The + StorageTaskAssignment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageTaskAssignment]: + """Asynchronously creates a new storage task assignment sub-resource with the specified + parameters. If a storage task assignment is already created and a subsequent create request is + issued with different properties, the storage task assignment properties will be updated. If a + storage task assignment is already created and a subsequent create or update request is issued + with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to create a Storage Task Assignment. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageTaskAssignment. 
The + StorageTaskAssignment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageTaskAssignment]: + """Asynchronously creates a new storage task assignment sub-resource with the specified + parameters. If a storage task assignment is already created and a subsequent create request is + issued with different properties, the storage task assignment properties will be updated. If a + storage task assignment is already created and a subsequent create or update request is issued + with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to create a Storage Task Assignment. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageTaskAssignment. 
The + StorageTaskAssignment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: Union[_models.StorageTaskAssignment, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageTaskAssignment]: + """Asynchronously creates a new storage task assignment sub-resource with the specified + parameters. If a storage task assignment is already created and a subsequent create request is + issued with different properties, the storage task assignment properties will be updated. If a + storage task assignment is already created and a subsequent create or update request is issued + with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to create a Storage Task Assignment. Is one of the following + types: StorageTaskAssignment, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignment or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns StorageTaskAssignment. 
The + StorageTaskAssignment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageTaskAssignment] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + parameters=parameters, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageTaskAssignment, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.StorageTaskAssignment].from_continuation_token( + polling_method=polling_method, + 
continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.StorageTaskAssignment]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: Union[_models.StorageTaskAssignmentUpdateParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_task_assignments_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: _models.StorageTaskAssignmentUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageTaskAssignment]: + """Update storage task assignment properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. 
Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to update a Storage Task Assignment. Required. + :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageTaskAssignment. The + StorageTaskAssignment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageTaskAssignment]: + """Update storage task assignment properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to update a Storage Task Assignment. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
+ Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageTaskAssignment. The + StorageTaskAssignment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageTaskAssignment]: + """Update storage task assignment properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to update a Storage Task Assignment. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageTaskAssignment. 
The + StorageTaskAssignment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: Union[_models.StorageTaskAssignmentUpdateParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageTaskAssignment]: + """Update storage task assignment properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to update a Storage Task Assignment. Is one of the following + types: StorageTaskAssignmentUpdateParameters, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateParameters or JSON or + IO[bytes] + :return: An instance of AsyncLROPoller that returns StorageTaskAssignment. 
The + StorageTaskAssignment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageTaskAssignment] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + parameters=parameters, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageTaskAssignment, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.StorageTaskAssignment].from_continuation_token( + polling_method=polling_method, + 
continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.StorageTaskAssignment]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_task_assignments_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete the storage task assignment sub-resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. 
+ :type storage_task_assignment_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, *, top: Optional[int] = None, **kwargs: Any 
+ ) -> AsyncItemPaged["_models.StorageTaskAssignment"]: + """List all the storage task assignments in an account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword top: Optional, specifies the maximum number of storage task assignment Ids to be + included in the list response. Default value is None. + :paramtype top: int + :return: An iterator like instance of StorageTaskAssignment + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageTaskAssignment]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_task_assignments_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + top=top, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = 
case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageTaskAssignment], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "storage_task_assignment_name", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def _stop_assignment_initial( + self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any + 
) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_task_assignments_stop_assignment_request( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", 
response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "storage_task_assignment_name", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def begin_stop_assignment( + self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Stops any active running storage action for the storage task assignment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. 
+ :type storage_task_assignment_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._stop_assignment_initial( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + +class ConnectorsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`connectors` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def get( + self, resource_group_name: str, account_name: str, connector_name: str, **kwargs: Any + ) -> _models.Connector: + """Get the specified Storage Connector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :return: Connector. 
The Connector is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Connector + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Connector] = kwargs.pop("cls", None) + + _request = build_connectors_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Connector, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: 
ignore + + return deserialized # type: ignore + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def _create_initial( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + resource: Union[_models.Connector, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_connectors_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, 
stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + resource: _models.Connector, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Connector]: + """Create a Storage Connector if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. 
+ :type connector_name: str + :param resource: Create a Storage Connector if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Required. + :type resource: ~azure.mgmt.storage.models.Connector + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Connector. The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Connector]: + """Create a Storage Connector if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param resource: Create a Storage Connector if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Connector. 
The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Connector]: + """Create a Storage Connector if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param resource: Create a Storage Connector if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Connector. 
The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def begin_create( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + resource: Union[_models.Connector, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Connector]: + """Create a Storage Connector if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param resource: Create a Storage Connector if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Is one of the following + types: Connector, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.storage.models.Connector or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Connector. 
The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Connector] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Connector, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Connector].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + 
return AsyncLROPoller[_models.Connector]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def _update_initial( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + properties: Union[_models.Connector, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_connectors_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + properties: _models.Connector, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Connector]: + """Update a Storage Connector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param properties: The updated properties of the Storage Connector. Required. 
+ :type properties: ~azure.mgmt.storage.models.Connector + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Connector. The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Connector]: + """Update a Storage Connector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param properties: The updated properties of the Storage Connector. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Connector. 
The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Connector]: + """Update a Storage Connector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param properties: The updated properties of the Storage Connector. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Connector. 
The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def begin_update( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + properties: Union[_models.Connector, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Connector]: + """Update a Storage Connector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param properties: The updated properties of the Storage Connector. Is one of the following + types: Connector, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.storage.models.Connector or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Connector. 
The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Connector] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Connector, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Connector].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) 
+ return AsyncLROPoller[_models.Connector]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": ["api_version", "subscription_id", "resource_group_name", "account_name", "connector_name"] + }, + api_versions_list=["2025-08-01"], + ) + async def _delete_initial( + self, resource_group_name: str, account_name: str, connector_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_connectors_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + 
) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": ["api_version", "subscription_id", "resource_group_name", "account_name", "connector_name"] + }, + api_versions_list=["2025-08-01"], + ) + async def begin_delete( + self, resource_group_name: str, account_name: str, connector_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a Storage Connector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. 
+ :type connector_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": ["api_version", "subscription_id", 
"resource_group_name", "account_name", "accept"] + }, + api_versions_list=["2025-08-01"], + ) + def list_by_storage_account( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.Connector"]: + """List all Storage Connectors in a Storage Account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An iterator like instance of Connector + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Connector]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_connectors_list_by_storage_account_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: 
[urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Connector], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def _test_existing_connection_initial( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + body: 
Union[_models.TestExistingConnectionRequest, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_connectors_test_existing_connection_request( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + 
response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_test_existing_connection( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + body: _models.TestExistingConnectionRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.TestConnectionResponse]: + """This method is used to verify that the connection to the backing data store works. This API is + designed to be used for monitoring and debugging purposes. From the caller’s perspective, this + method does the following: Calls List on the backing data store, attempting to list up to one + blob/object/etc. If the above succeeds, and if a blob/object/etc is found, calls Get on that + object, attempting to download one byte. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param body: This method is used to verify that the connection to the backing data store works. + This API is designed to be used for monitoring and debugging purposes. 
From the caller’s + perspective, this method does the following: Calls List on the backing data store, attempting + to list up to one blob/object/etc. If the above succeeds, and if a blob/object/etc is found, + calls Get on that object, attempting to download one byte. Required. + :type body: ~azure.mgmt.storage.models.TestExistingConnectionRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns TestConnectionResponse. The + TestConnectionResponse is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.TestConnectionResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_test_existing_connection( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + body: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.TestConnectionResponse]: + """This method is used to verify that the connection to the backing data store works. This API is + designed to be used for monitoring and debugging purposes. From the caller’s perspective, this + method does the following: Calls List on the backing data store, attempting to list up to one + blob/object/etc. If the above succeeds, and if a blob/object/etc is found, calls Get on that + object, attempting to download one byte. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. 
+ :type connector_name: str + :param body: This method is used to verify that the connection to the backing data store works. + This API is designed to be used for monitoring and debugging purposes. From the caller’s + perspective, this method does the following: Calls List on the backing data store, attempting + to list up to one blob/object/etc. If the above succeeds, and if a blob/object/etc is found, + calls Get on that object, attempting to download one byte. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns TestConnectionResponse. The + TestConnectionResponse is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.TestConnectionResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_test_existing_connection( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + body: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.TestConnectionResponse]: + """This method is used to verify that the connection to the backing data store works. This API is + designed to be used for monitoring and debugging purposes. From the caller’s perspective, this + method does the following: Calls List on the backing data store, attempting to list up to one + blob/object/etc. If the above succeeds, and if a blob/object/etc is found, calls Get on that + object, attempting to download one byte. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param body: This method is used to verify that the connection to the backing data store works. + This API is designed to be used for monitoring and debugging purposes. From the caller’s + perspective, this method does the following: Calls List on the backing data store, attempting + to list up to one blob/object/etc. If the above succeeds, and if a blob/object/etc is found, + calls Get on that object, attempting to download one byte. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns TestConnectionResponse. The + TestConnectionResponse is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.TestConnectionResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def begin_test_existing_connection( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + body: Union[_models.TestExistingConnectionRequest, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.TestConnectionResponse]: + """This method is used to verify that the connection to the backing data store works. This API is + designed to be used for monitoring and debugging purposes. 
From the caller’s perspective, this + method does the following: Calls List on the backing data store, attempting to list up to one + blob/object/etc. If the above succeeds, and if a blob/object/etc is found, calls Get on that + object, attempting to download one byte. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param body: This method is used to verify that the connection to the backing data store works. + This API is designed to be used for monitoring and debugging purposes. From the caller’s + perspective, this method does the following: Calls List on the backing data store, attempting + to list up to one blob/object/etc. If the above succeeds, and if a blob/object/etc is found, + calls Get on that object, attempting to download one byte. Is one of the following types: + TestExistingConnectionRequest, JSON, IO[bytes] Required. + :type body: ~azure.mgmt.storage.models.TestExistingConnectionRequest or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns TestConnectionResponse. 
The + TestConnectionResponse is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.TestConnectionResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.TestConnectionResponse] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._test_existing_connection_initial( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + body=body, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = _deserialize(_models.TestConnectionResponse, response.json()) + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = 
cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.TestConnectionResponse].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.TestConnectionResponse]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + +class DataSharesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`data_shares` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "data_share_name", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def get( + self, resource_group_name: str, account_name: str, data_share_name: str, **kwargs: Any + ) -> _models.DataShare: + """Get the specified Storage DataShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :return: DataShare. The DataShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.DataShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.DataShare] = kwargs.pop("cls", None) + + _request = build_data_shares_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + data_share_name=data_share_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.DataShare, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "data_share_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def _create_initial( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + resource: Union[_models.DataShare, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_data_shares_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + data_share_name=data_share_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", 
skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + resource: _models.DataShare, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataShare]: + """Create a Storage DataShare if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param resource: Create a Storage DataShare if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Required. + :type resource: ~azure.mgmt.storage.models.DataShare + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns DataShare. The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataShare]: + """Create a Storage DataShare if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param resource: Create a Storage DataShare if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Required. 
+ :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns DataShare. The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataShare]: + """Create a Storage DataShare if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param resource: Create a Storage DataShare if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns DataShare. 
The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "data_share_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def begin_create( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + resource: Union[_models.DataShare, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.DataShare]: + """Create a Storage DataShare if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param resource: Create a Storage DataShare if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Is one of the following + types: DataShare, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.storage.models.DataShare or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns DataShare. 
The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataShare] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + account_name=account_name, + data_share_name=data_share_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.DataShare, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.DataShare].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + 
return AsyncLROPoller[_models.DataShare]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "data_share_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def _update_initial( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + properties: Union[_models.DataShare, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_data_shares_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + data_share_name=data_share_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + properties: _models.DataShare, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataShare]: + """Update a Storage DataShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param properties: The updated properties of the Storage DataShare. Required. 
+ :type properties: ~azure.mgmt.storage.models.DataShare + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns DataShare. The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataShare]: + """Update a Storage DataShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param properties: The updated properties of the Storage DataShare. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns DataShare. 
The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataShare]: + """Update a Storage DataShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param properties: The updated properties of the Storage DataShare. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns DataShare. 
The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "data_share_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + async def begin_update( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + properties: Union[_models.DataShare, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.DataShare]: + """Update a Storage DataShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param properties: The updated properties of the Storage DataShare. Is one of the following + types: DataShare, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.storage.models.DataShare or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns DataShare. 
The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataShare] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + account_name=account_name, + data_share_name=data_share_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.DataShare, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.DataShare].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + 
) + return AsyncLROPoller[_models.DataShare]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": ["api_version", "subscription_id", "resource_group_name", "account_name", "data_share_name"] + }, + api_versions_list=["2025-08-01"], + ) + async def _delete_initial( + self, resource_group_name: str, account_name: str, data_share_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_data_shares_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + data_share_name=data_share_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + 
response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": ["api_version", "subscription_id", "resource_group_name", "account_name", "data_share_name"] + }, + api_versions_list=["2025-08-01"], + ) + async def begin_delete( + self, resource_group_name: str, account_name: str, data_share_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a Storage DataShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. 
+ :type data_share_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + account_name=account_name, + data_share_name=data_share_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": ["api_version", "subscription_id", 
"resource_group_name", "account_name", "accept"] + }, + api_versions_list=["2025-08-01"], + ) + def list_by_storage_account( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.DataShare"]: + """List all Storage DataShares in a Storage Account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An iterator like instance of DataShare + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.DataShare]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_data_shares_list_by_storage_account_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: 
[urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.DataShare], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class PrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`private_link_resources` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def list_by_storage_account( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> _models.PrivateLinkResourceListResult: + """Gets the private link resources that need to be created for a storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: PrivateLinkResourceListResult. 
The PrivateLinkResourceListResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.PrivateLinkResourceListResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None) + + _request = build_private_link_resources_list_by_storage_account_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.PrivateLinkResourceListResult, response.json()) + + 
if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class StorageTaskAssignmentsInstancesReportOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`storage_task_assignments_instances_report` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, *, filter: Optional[str] = None, **kwargs: Any + ) -> AsyncItemPaged["_models.StorageTaskReportInstance"]: + """Fetch the report summary of all the storage task assignments and instances in an account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword filter: Optional. When specified, it can be used to query using reporting properties. + See `Constructing Filter Strings + `_ + for details. Default value is None. 
+ :paramtype filter: str + :return: An iterator like instance of StorageTaskReportInstance + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.StorageTaskReportInstance] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.StorageTaskReportInstance]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_task_assignments_instances_report_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, 
**path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageTaskReportInstance], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class QueueOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`queue` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, *, filter: Optional[str] = None, **kwargs: Any + ) -> AsyncItemPaged["_models.ListQueue"]: + """Gets a list of all the queues under the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword filter: Optional, When specified, only the queues with a name starting with the given + filter will be listed. Default value is None. 
+ :paramtype filter: str + :return: An iterator like instance of ListQueue + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.ListQueue] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.ListQueue]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_queue_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): 
+ deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.ListQueue], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def get( + self, resource_group_name: str, account_name: str, queue_name: str, **kwargs: Any + ) -> _models.StorageQueue: + """Gets the queue with the specified queue name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. Required. + :type queue_name: str + :return: StorageQueue. 
The StorageQueue is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageQueue + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageQueue] = kwargs.pop("cls", None) + + _request = build_queue_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + queue_name=queue_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageQueue, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return 
deserialized # type: ignore + + @overload + async def create( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + queue: _models.StorageQueue, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageQueue: + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. Required. + :type queue_name: str + :param queue: Queue properties and metadata to be created with. Required. + :type queue: ~azure.mgmt.storage.models.StorageQueue + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageQueue. The StorageQueue is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageQueue + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + queue: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageQueue: + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. Required. + :type queue_name: str + :param queue: Queue properties and metadata to be created with. Required. + :type queue: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageQueue. The StorageQueue is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageQueue + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + queue: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageQueue: + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. 
+ :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. Required. + :type queue_name: str + :param queue: Queue properties and metadata to be created with. Required. + :type queue: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageQueue. The StorageQueue is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageQueue + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + queue: Union[_models.StorageQueue, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.StorageQueue: + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. Required. + :type queue_name: str + :param queue: Queue properties and metadata to be created with. Is one of the following types: + StorageQueue, JSON, IO[bytes] Required. 
+ :type queue: ~azure.mgmt.storage.models.StorageQueue or JSON or IO[bytes] + :return: StorageQueue. The StorageQueue is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageQueue + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageQueue] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(queue, (IOBase, bytes)): + _content = queue + else: + _content = json.dumps(queue, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_queue_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + queue_name=queue_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageQueue, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + queue: _models.StorageQueue, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageQueue: + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. Required. + :type queue_name: str + :param queue: Queue properties and metadata to be created with. Required. + :type queue: ~azure.mgmt.storage.models.StorageQueue + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageQueue. 
The StorageQueue is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageQueue + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + queue: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageQueue: + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. Required. + :type queue_name: str + :param queue: Queue properties and metadata to be created with. Required. + :type queue: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageQueue. The StorageQueue is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageQueue + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + queue: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageQueue: + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters. The name must consist of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. Required. + :type queue_name: str + :param queue: Queue properties and metadata to be created with. Required. + :type queue: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageQueue. The StorageQueue is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageQueue + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + queue: Union[_models.StorageQueue, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.StorageQueue: + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. 
+ :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters. The name must consist of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. Required. + :type queue_name: str + :param queue: Queue properties and metadata to be created with. Is one of the following types: + StorageQueue, JSON, IO[bytes]. Required. + :type queue: ~azure.mgmt.storage.models.StorageQueue or JSON or IO[bytes] + :return: StorageQueue. The StorageQueue is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageQueue + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageQueue] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(queue, (IOBase, bytes)): + _content = queue + else: + _content = json.dumps(queue, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_queue_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + queue_name=queue_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, 
**path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageQueue, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete(self, resource_group_name: str, account_name: str, queue_name: str, **kwargs: Any) -> None: + """Deletes the queue with the specified queue name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. Required. 
+ :type queue_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_queue_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + queue_name=queue_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + +class ObjectReplicationPoliciesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`object_replication_policies` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, account_name: str, object_replication_policy_id: str, **kwargs: Any + ) -> _models.ObjectReplicationPolicy: + """Get the object replication policy of the storage account by policy ID. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. Required. + :type object_replication_policy_id: str + :return: ObjectReplicationPolicy. 
The ObjectReplicationPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ObjectReplicationPolicy] = kwargs.pop("cls", None) + + _request = build_object_replication_policies_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + object_replication_policy_id=object_replication_policy_id, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ObjectReplicationPolicy, 
response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + account_name: str, + object_replication_policy_id: str, + properties: _models.ObjectReplicationPolicy, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ObjectReplicationPolicy: + """Create or update the object replication policy of the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. Required. + :type object_replication_policy_id: str + :param properties: The object replication policy set to a storage account. A unique policy ID + will be created if absent. Required. + :type properties: ~azure.mgmt.storage.models.ObjectReplicationPolicy + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ObjectReplicationPolicy. 
The ObjectReplicationPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + account_name: str, + object_replication_policy_id: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ObjectReplicationPolicy: + """Create or update the object replication policy of the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. Required. + :type object_replication_policy_id: str + :param properties: The object replication policy set to a storage account. A unique policy ID + will be created if absent. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ObjectReplicationPolicy. 
The ObjectReplicationPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + account_name: str, + object_replication_policy_id: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ObjectReplicationPolicy: + """Create or update the object replication policy of the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. Required. + :type object_replication_policy_id: str + :param properties: The object replication policy set to a storage account. A unique policy ID + will be created if absent. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: ObjectReplicationPolicy. 
The ObjectReplicationPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + account_name: str, + object_replication_policy_id: str, + properties: Union[_models.ObjectReplicationPolicy, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.ObjectReplicationPolicy: + """Create or update the object replication policy of the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. Required. + :type object_replication_policy_id: str + :param properties: The object replication policy set to a storage account. A unique policy ID + will be created if absent. Is one of the following types: ObjectReplicationPolicy, JSON, + IO[bytes] Required. + :type properties: ~azure.mgmt.storage.models.ObjectReplicationPolicy or JSON or IO[bytes] + :return: ObjectReplicationPolicy. 
The ObjectReplicationPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ObjectReplicationPolicy] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_object_replication_policies_create_or_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + object_replication_policy_id=object_replication_policy_id, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, 
StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ObjectReplicationPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete( + self, resource_group_name: str, account_name: str, object_replication_policy_id: str, **kwargs: Any + ) -> None: + """Deletes the object replication policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. Required. 
+ :type object_replication_policy_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_object_replication_policies_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + object_replication_policy_id=object_replication_policy_id, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.ObjectReplicationPolicy"]: + """List the object replication policies associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An iterator like instance of ObjectReplicationPolicy + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.ObjectReplicationPolicy] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.ObjectReplicationPolicy]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_object_replication_policies_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + 
"self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.ObjectReplicationPolicy], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class LocalUsersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`local_users` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, resource_group_name: str, account_name: str, username: str, **kwargs: Any) -> _models.LocalUser: + """Get the local user of the storage account by username. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :return: LocalUser. 
The LocalUser is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUser + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.LocalUser] = kwargs.pop("cls", None) + + _request = build_local_users_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + username=username, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LocalUser, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return 
deserialized # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + account_name: str, + username: str, + properties: _models.LocalUser, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LocalUser: + """Create or update the properties of a local user associated with the storage account. Properties + for NFSv3 enablement and extended groups cannot be set with other properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :param properties: The local user associated with a storage account. Required. + :type properties: ~azure.mgmt.storage.models.LocalUser + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: LocalUser. The LocalUser is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUser + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + account_name: str, + username: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LocalUser: + """Create or update the properties of a local user associated with the storage account. Properties + for NFSv3 enablement and extended groups cannot be set with other properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :param properties: The local user associated with a storage account. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: LocalUser. The LocalUser is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUser + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + account_name: str, + username: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LocalUser: + """Create or update the properties of a local user associated with the storage account. Properties + for NFSv3 enablement and extended groups cannot be set with other properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :param properties: The local user associated with a storage account. Required. 
+ :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: LocalUser. The LocalUser is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUser + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + account_name: str, + username: str, + properties: Union[_models.LocalUser, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.LocalUser: + """Create or update the properties of a local user associated with the storage account. Properties + for NFSv3 enablement and extended groups cannot be set with other properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :param properties: The local user associated with a storage account. Is one of the following + types: LocalUser, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.storage.models.LocalUser or JSON or IO[bytes] + :return: LocalUser. 
The LocalUser is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUser + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.LocalUser] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_local_users_create_or_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + username=username, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + 
error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LocalUser, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete(self, resource_group_name: str, account_name: str, username: str, **kwargs: Any) -> None: + """Deletes the local user associated with the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. 
+ :type username: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_local_users_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + username=username, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + account_name: str, + *, + filter: Optional[str] = None, + include: Optional[Union[str, _models.ListLocalUserIncludeParam]] = None, + **kwargs: Any + ) -> AsyncItemPaged["_models.LocalUser"]: + """List the local users associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword filter: Optional. When specified, only local user names starting with the filter will + be listed. Default value is None. + :paramtype filter: str + :keyword include: Optional, when specified, will list local users enabled for the specific + protocol. Lists all users by default. "nfsv3" Default value is None. + :paramtype include: str or ~azure.mgmt.storage.models.ListLocalUserIncludeParam + :return: An iterator like instance of LocalUser + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.LocalUser] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.LocalUser]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_local_users_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + filter=filter, + include=include, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = 
urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.LocalUser], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def list_keys( + self, resource_group_name: str, account_name: str, username: str, **kwargs: Any + ) -> _models.LocalUserKeys: + """List SSH authorized keys and shared key of the local user. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :return: LocalUserKeys. The LocalUserKeys is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUserKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.LocalUserKeys] = kwargs.pop("cls", None) + + _request = build_local_users_list_keys_request( + resource_group_name=resource_group_name, + account_name=account_name, + username=username, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LocalUserKeys, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def regenerate_password( + self, resource_group_name: str, account_name: str, username: str, **kwargs: Any + ) -> _models.LocalUserRegeneratePasswordResult: + """Regenerate the local user SSH password. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :return: LocalUserRegeneratePasswordResult. 
The LocalUserRegeneratePasswordResult is compatible + with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUserRegeneratePasswordResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.LocalUserRegeneratePasswordResult] = kwargs.pop("cls", None) + + _request = build_local_users_regenerate_password_request( + resource_group_name=resource_group_name, + account_name=account_name, + username=username, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LocalUserRegeneratePasswordResult, 
response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class TableOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`table` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any) -> _models.Table: + """Gets the table with the specified table name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :return: Table. 
The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Table] = kwargs.pop("cls", None) + + _request = build_table_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + table_name=table_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Table, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + 
+ @overload + async def create( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[_models.Table] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Default value is None. + :type parameters: ~azure.mgmt.storage.models.Table + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Table. The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[JSON] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Default value is None. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Table. The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[IO[bytes]] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Default value is None. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: Table. The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[Union[_models.Table, JSON, IO[bytes]]] = None, + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Is one of the following types: + Table, JSON, IO[bytes] Default value is None. + :type parameters: ~azure.mgmt.storage.models.Table or JSON or IO[bytes] + :return: Table. 
The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if parameters else None + cls: ClsType[_models.Table] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" if parameters else None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_table_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + table_name=table_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, 
StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Table, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[_models.Table] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Default value is None. + :type parameters: ~azure.mgmt.storage.models.Table + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Table. 
The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[JSON] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Default value is None. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Table. The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[IO[bytes]] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Default value is None. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: Table. The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[Union[_models.Table, JSON, IO[bytes]]] = None, + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. 
Is one of the following types: + Table, JSON, IO[bytes] Default value is None. + :type parameters: ~azure.mgmt.storage.models.Table or JSON or IO[bytes] + :return: Table. The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if parameters else None + cls: ClsType[_models.Table] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" if parameters else None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_table_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + table_name=table_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + 
+ if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Table, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete(self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any) -> None: + """Deletes the table with the specified table name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. 
+ :type table_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_table_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + table_name=table_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Table"]: + """Gets a list of all the tables under the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An iterator like instance of Table + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.Table] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Table]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_table_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def 
extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Table], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class StorageTaskAssignmentInstancesReportOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`storage_task_assignment_instances_report` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + *, + filter: Optional[str] = None, + **kwargs: Any + ) -> AsyncItemPaged["_models.StorageTaskReportInstance"]: + """Fetch the report summary of a single storage task assignment's instances. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :keyword filter: Optional. When specified, it can be used to query using reporting properties. + See `Constructing Filter Strings + `_ + for details. Default value is None. 
+ :paramtype filter: str + :return: An iterator like instance of StorageTaskReportInstance + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.StorageTaskReportInstance] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.StorageTaskReportInstance]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_task_assignment_instances_report_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + 
_request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageTaskReportInstance], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class SkusOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`skus` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.SkuInformation"]: + """Lists the available SKUs supported by Microsoft.Storage for given subscription. + + :return: An iterator like instance of SkuInformation + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.SkuInformation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.SkuInformation]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_skus_list_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.SkuInformation], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class UsagesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s + :attr:`usages` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_by_location(self, location: str, **kwargs: Any) -> AsyncItemPaged["_models.Usage"]: + """Gets the current usage count and the limit for the resources of the location under the + subscription. + + :param location: The location name. Required. + :type location: str + :return: An iterator like instance of Usage + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.Usage] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Usage]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_usages_list_by_location_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( 
+ { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Usage], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_patch.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_patch.py index 8bcb627aa475..87676c65a8f0 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_patch.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_patch.py @@ -7,9 +7,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import 
List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_private_endpoint_connections_operations.py deleted file mode 100644 index 3fb1794efa10..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_private_endpoint_connections_operations.py +++ /dev/null @@ -1,432 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._private_endpoint_connections_operations import ( - build_delete_request, - build_get_request, - build_list_request, - build_put_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class PrivateEndpointConnectionsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`private_endpoint_connections` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> AsyncItemPaged["_models.PrivateEndpointConnection"]: - """List all the private endpoint connections associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: An iterator like instance of either PrivateEndpointConnection or the result of - cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.PrivateEndpointConnection] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.PrivateEndpointConnectionListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("PrivateEndpointConnectionListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, 
AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - @distributed_trace_async - async def get( - self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any - ) -> _models.PrivateEndpointConnection: - """Gets the specified private endpoint connection associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the Azure resource. Required. 
- :type private_endpoint_connection_name: str - :return: PrivateEndpointConnection or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - private_endpoint_connection_name=private_endpoint_connection_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def put( - self, - resource_group_name: str, - account_name: str, - private_endpoint_connection_name: str, - properties: 
_models.PrivateEndpointConnection, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.PrivateEndpointConnection: - """Update the state of specified private endpoint connection associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the Azure resource. Required. - :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. Required. - :type properties: ~azure.mgmt.storage.models.PrivateEndpointConnection - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: PrivateEndpointConnection or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def put( - self, - resource_group_name: str, - account_name: str, - private_endpoint_connection_name: str, - properties: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.PrivateEndpointConnection: - """Update the state of specified private endpoint connection associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. 
- Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the Azure resource. Required. - :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. Required. - :type properties: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: PrivateEndpointConnection or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def put( - self, - resource_group_name: str, - account_name: str, - private_endpoint_connection_name: str, - properties: Union[_models.PrivateEndpointConnection, IO[bytes]], - **kwargs: Any - ) -> _models.PrivateEndpointConnection: - """Update the state of specified private endpoint connection associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the Azure resource. Required. - :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. Is either a - PrivateEndpointConnection type or a IO[bytes] type. Required. 
- :type properties: ~azure.mgmt.storage.models.PrivateEndpointConnection or IO[bytes] - :return: PrivateEndpointConnection or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties - else: - _json = self._serialize.body(properties, "PrivateEndpointConnection") - - _request = build_put_request( - resource_group_name=resource_group_name, - account_name=account_name, - private_endpoint_connection_name=private_endpoint_connection_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def delete( - self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any - ) -> None: - """Deletes the specified private endpoint connection associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the Azure resource. Required. 
- :type private_endpoint_connection_name: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - private_endpoint_connection_name=private_endpoint_connection_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_private_link_resources_operations.py deleted file mode 100644 index 35a2e242ed1c..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_private_link_resources_operations.py +++ 
/dev/null @@ -1,112 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar - -from azure.core import AsyncPipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._private_link_resources_operations import build_list_by_storage_account_request -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class PrivateLinkResourcesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`private_link_resources` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def list_by_storage_account( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> _models.PrivateLinkResourceListResult: - """Gets the private link resources that need to be created for a storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: PrivateLinkResourceListResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.PrivateLinkResourceListResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None) - - _request = build_list_by_storage_account_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_queue_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_queue_operations.py deleted file mode 100644 index a4f5f19db4b2..000000000000 --- 
a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_queue_operations.py +++ /dev/null @@ -1,595 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._queue_operations import ( - build_create_request, - build_delete_request, - build_get_request, - build_list_request, - build_update_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class QueueOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. 
- - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`queue` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - async def create( - self, - resource_group_name: str, - account_name: str, - queue_name: str, - queue: _models.StorageQueue, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageQueue: - """Creates a new queue with the specified queue name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :param queue: Queue properties and metadata to be created with. Required. - :type queue: ~azure.mgmt.storage.models.StorageQueue - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def create( - self, - resource_group_name: str, - account_name: str, - queue_name: str, - queue: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageQueue: - """Creates a new queue with the specified queue name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :param queue: Queue properties and metadata to be created with. Required. - :type queue: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def create( - self, - resource_group_name: str, - account_name: str, - queue_name: str, - queue: Union[_models.StorageQueue, IO[bytes]], - **kwargs: Any - ) -> _models.StorageQueue: - """Creates a new queue with the specified queue name, under the specified account. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :param queue: Queue properties and metadata to be created with. Is either a StorageQueue type - or a IO[bytes] type. Required. - :type queue: ~azure.mgmt.storage.models.StorageQueue or IO[bytes] - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageQueue] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(queue, (IOBase, bytes)): - _content = queue - else: - _json = self._serialize.body(queue, "StorageQueue") - - _request = build_create_request( - 
resource_group_name=resource_group_name, - account_name=account_name, - queue_name=queue_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageQueue", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def update( - self, - resource_group_name: str, - account_name: str, - queue_name: str, - queue: _models.StorageQueue, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageQueue: - """Creates a new queue with the specified queue name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. 
Required. - :type queue_name: str - :param queue: Queue properties and metadata to be created with. Required. - :type queue: ~azure.mgmt.storage.models.StorageQueue - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def update( - self, - resource_group_name: str, - account_name: str, - queue_name: str, - queue: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageQueue: - """Creates a new queue with the specified queue name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :param queue: Queue properties and metadata to be created with. Required. - :type queue: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def update( - self, - resource_group_name: str, - account_name: str, - queue_name: str, - queue: Union[_models.StorageQueue, IO[bytes]], - **kwargs: Any - ) -> _models.StorageQueue: - """Creates a new queue with the specified queue name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :param queue: Queue properties and metadata to be created with. Is either a StorageQueue type - or a IO[bytes] type. Required. 
- :type queue: ~azure.mgmt.storage.models.StorageQueue or IO[bytes] - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageQueue] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(queue, (IOBase, bytes)): - _content = queue - else: - _json = self._serialize.body(queue, "StorageQueue") - - _request = build_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - queue_name=queue_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageQueue", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: 
ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get( - self, resource_group_name: str, account_name: str, queue_name: str, **kwargs: Any - ) -> _models.StorageQueue: - """Gets the queue with the specified queue name, under the specified account if it exists. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. 
- :type queue_name: str - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageQueue] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - queue_name=queue_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageQueue", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def delete(self, resource_group_name: str, account_name: str, queue_name: str, **kwargs: Any) -> None: - """Deletes the queue with the specified queue name, under the specified account if it exists. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - queue_name=queue_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # 
type: ignore - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - maxpagesize: Optional[str] = None, - filter: Optional[str] = None, - **kwargs: Any - ) -> AsyncItemPaged["_models.ListQueue"]: - """Gets a list of all the queues under the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional, a maximum number of queues that should be included in a list - queue response. Default value is None. - :type maxpagesize: str - :param filter: Optional, When specified, only the queues with a name starting with the given - filter will be listed. Default value is None. 
- :type filter: str - :return: An iterator like instance of either ListQueue or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.ListQueue] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListQueueResource] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("ListQueueResource", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, 
AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_queue_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_queue_services_operations.py deleted file mode 100644 index 8af3267b6d18..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_queue_services_operations.py +++ /dev/null @@ -1,321 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Literal, Optional, TypeVar, Union, overload - -from azure.core import AsyncPipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._queue_services_operations import ( - build_get_service_properties_request, - build_list_request, - build_set_service_properties_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class QueueServicesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`queue_services` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.ListQueueServices: - """List all queue services for the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: ListQueueServices or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListQueueServices - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListQueueServices] = kwargs.pop("cls", None) - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ListQueueServices", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: _models.QueueServiceProperties, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.QueueServiceProperties: - """Sets the properties of a storage account’s Queue service, including properties for Storage - Analytics and CORS (Cross-Origin Resource 
Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Queue service, only properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. - :type parameters: ~azure.mgmt.storage.models.QueueServiceProperties - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: QueueServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.QueueServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.QueueServiceProperties: - """Sets the properties of a storage account’s Queue service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param parameters: The properties of a storage account’s Queue service, only properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: QueueServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.QueueServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.QueueServiceProperties, IO[bytes]], - **kwargs: Any - ) -> _models.QueueServiceProperties: - """Sets the properties of a storage account’s Queue service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Queue service, only properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Is either a - QueueServiceProperties type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.QueueServiceProperties or IO[bytes] - :return: QueueServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.QueueServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - queue_service_name: Literal["default"] = kwargs.pop("queue_service_name", "default") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.QueueServiceProperties] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "QueueServiceProperties") - - _request = build_set_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - queue_service_name=queue_service_name, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) - - deserialized = self._deserialize("QueueServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_service_properties( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> _models.QueueServiceProperties: - """Gets the properties of a storage account’s Queue service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: QueueServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.QueueServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - queue_service_name: Literal["default"] = kwargs.pop("queue_service_name", "default") - cls: ClsType[_models.QueueServiceProperties] = kwargs.pop("cls", None) - - _request = build_get_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - queue_service_name=queue_service_name, - 
headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("QueueServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_skus_operations.py deleted file mode 100644 index 2fb425d91e68..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_skus_operations.py +++ /dev/null @@ -1,129 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._skus_operations import build_list_request -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class SkusOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`skus` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list(self, **kwargs: Any) -> AsyncItemPaged["_models.SkuInformation"]: - """Lists the available SKUs supported by Microsoft.Storage for given subscription. - - :return: An iterator like instance of either SkuInformation or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.SkuInformation] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageSkuListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("StorageSkuListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_accounts_operations.py deleted file mode 100644 index 21429348adba..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_accounts_operations.py +++ /dev/null @@ -1,2207 +0,0 @@ -# pylint: disable=too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, AsyncIterator, Callable, IO, Literal, Optional, TypeVar, Union, cast, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... 
import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._storage_accounts_operations import ( - build_abort_hierarchical_namespace_migration_request, - build_check_name_availability_request, - build_create_request, - build_customer_initiated_migration_request, - build_delete_request, - build_failover_request, - build_get_customer_initiated_migration_request, - build_get_properties_request, - build_hierarchical_namespace_migration_request, - build_list_account_sas_request, - build_list_by_resource_group_request, - build_list_keys_request, - build_list_request, - build_list_service_sas_request, - build_regenerate_key_request, - build_restore_blob_ranges_request, - build_revoke_user_delegation_keys_request, - build_update_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class StorageAccountsOperations: # pylint: disable=too-many-public-methods - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`storage_accounts` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - async def check_name_availability( - self, - account_name: _models.StorageAccountCheckNameAvailabilityParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.CheckNameAvailabilityResult: - """Checks that the storage account name is valid and is not already in use. - - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: ~azure.mgmt.storage.models.StorageAccountCheckNameAvailabilityParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: CheckNameAvailabilityResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def check_name_availability( - self, account_name: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.CheckNameAvailabilityResult: - """Checks that the storage account name is valid and is not already in use. - - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: CheckNameAvailabilityResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def check_name_availability( - self, account_name: Union[_models.StorageAccountCheckNameAvailabilityParameters, IO[bytes]], **kwargs: Any - ) -> _models.CheckNameAvailabilityResult: - """Checks that the storage account name is valid and is not already in use. - - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Is either a StorageAccountCheckNameAvailabilityParameters type or a - IO[bytes] type. Required. - :type account_name: ~azure.mgmt.storage.models.StorageAccountCheckNameAvailabilityParameters or - IO[bytes] - :return: CheckNameAvailabilityResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.CheckNameAvailabilityResult] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - 
_content = None - if isinstance(account_name, (IOBase, bytes)): - _content = account_name - else: - _json = self._serialize.body(account_name, "StorageAccountCheckNameAvailabilityParameters") - - _request = build_check_name_availability_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("CheckNameAvailabilityResult", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - async def _create_initial( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.StorageAccountCreateParameters, IO[bytes]], - **kwargs: Any - ) -> AsyncIterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = 
None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "StorageAccountCreateParameters") - - _request = build_create_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def begin_create( - self, - resource_group_name: str, - account_name: str, - parameters: _models.StorageAccountCreateParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncLROPoller[_models.StorageAccount]: - """Asynchronously creates a new storage account with the specified parameters. If an account is - already created and a subsequent create request is issued with different properties, the - account properties will be updated. If an account is already created and a subsequent create or - update request is issued with the exact same set of properties, the request will succeed. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for the created account. Required. - :type parameters: ~azure.mgmt.storage.models.StorageAccountCreateParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of - cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageAccount] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_create( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncLROPoller[_models.StorageAccount]: - """Asynchronously creates a new storage account with the specified parameters. If an account is - already created and a subsequent create request is issued with different properties, the - account properties will be updated. If an account is already created and a subsequent create or - update request is issued with the exact same set of properties, the request will succeed. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param parameters: The parameters to provide for the created account. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of - cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageAccount] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_create( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.StorageAccountCreateParameters, IO[bytes]], - **kwargs: Any - ) -> AsyncLROPoller[_models.StorageAccount]: - """Asynchronously creates a new storage account with the specified parameters. If an account is - already created and a subsequent create request is issued with different properties, the - account properties will be updated. If an account is already created and a subsequent create or - update request is issued with the exact same set of properties, the request will succeed. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for the created account. Is either a - StorageAccountCreateParameters type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.StorageAccountCreateParameters or IO[bytes] - :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of - cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageAccount] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageAccount] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._create_initial( - resource_group_name=resource_group_name, - account_name=account_name, - parameters=parameters, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("StorageAccount", pipeline_response.http_response) - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - if polling is True: - polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[_models.StorageAccount].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - 
client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[_models.StorageAccount]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace_async - async def delete(self, resource_group_name: str, account_name: str, **kwargs: Any) -> None: - """Deletes a storage account in Microsoft Azure. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, 
response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @distributed_trace_async - async def get_properties( - self, - resource_group_name: str, - account_name: str, - expand: Optional[Union[str, _models.StorageAccountExpand]] = None, - **kwargs: Any - ) -> _models.StorageAccount: - """Returns the properties for the specified storage account including but not limited to name, SKU - name, location, and account status. The ListKeys operation should be used to retrieve storage - keys. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param expand: May be used to expand the properties within account's properties. By default, - data is not included when fetching properties. Currently we only support geoReplicationStats - and blobRestoreStatus. Known values are: "geoReplicationStats" and "blobRestoreStatus". Default - value is None. 
- :type expand: str or ~azure.mgmt.storage.models.StorageAccountExpand - :return: StorageAccount or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccount - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageAccount] = kwargs.pop("cls", None) - - _request = build_get_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - expand=expand, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageAccount", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def update( - self, - resource_group_name: str, - account_name: str, - parameters: _models.StorageAccountUpdateParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageAccount: - """The update operation can be used to update the SKU, encryption, access tier, or tags for a - 
storage account. It can also be used to map the account to a custom domain. Only one custom - domain is supported per storage account; the replacement/change of custom domain is not - supported. In order to replace an old custom domain, the old value must be cleared/unregistered - before a new value can be set. The update of multiple properties is supported. This call does - not change the storage keys for the account. If you want to change the storage account keys, - use the regenerate keys operation. The location and name of the storage account cannot be - changed after creation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for the updated account. Required. - :type parameters: ~azure.mgmt.storage.models.StorageAccountUpdateParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageAccount or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccount - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def update( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageAccount: - """The update operation can be used to update the SKU, encryption, access tier, or tags for a - storage account. It can also be used to map the account to a custom domain. Only one custom - domain is supported per storage account; the replacement/change of custom domain is not - supported. 
In order to replace an old custom domain, the old value must be cleared/unregistered - before a new value can be set. The update of multiple properties is supported. This call does - not change the storage keys for the account. If you want to change the storage account keys, - use the regenerate keys operation. The location and name of the storage account cannot be - changed after creation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for the updated account. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageAccount or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccount - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def update( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.StorageAccountUpdateParameters, IO[bytes]], - **kwargs: Any - ) -> _models.StorageAccount: - """The update operation can be used to update the SKU, encryption, access tier, or tags for a - storage account. It can also be used to map the account to a custom domain. Only one custom - domain is supported per storage account; the replacement/change of custom domain is not - supported. In order to replace an old custom domain, the old value must be cleared/unregistered - before a new value can be set. The update of multiple properties is supported. This call does - not change the storage keys for the account. 
If you want to change the storage account keys, - use the regenerate keys operation. The location and name of the storage account cannot be - changed after creation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for the updated account. Is either a - StorageAccountUpdateParameters type or a IO[bytes] type. Required. - :type parameters: ~azure.mgmt.storage.models.StorageAccountUpdateParameters or IO[bytes] - :return: StorageAccount or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccount - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageAccount] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "StorageAccountUpdateParameters") - - _request = build_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - 
subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageAccount", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def list(self, **kwargs: Any) -> AsyncItemPaged["_models.StorageAccount"]: - """Lists all the storage accounts available under the subscription. Note that storage keys are not - returned; use the ListKeys operation for this. 
- - :return: An iterator like instance of either StorageAccount or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.StorageAccount] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageAccountListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("StorageAccountListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = 
False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - @distributed_trace - def list_by_resource_group( - self, resource_group_name: str, **kwargs: Any - ) -> AsyncItemPaged["_models.StorageAccount"]: - """Lists all the storage accounts available under the given resource group. Note that storage keys - are not returned; use the ListKeys operation for this. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :return: An iterator like instance of either StorageAccount or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.StorageAccount] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageAccountListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_by_resource_group_request( - resource_group_name=resource_group_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url 
= self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("StorageAccountListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - @distributed_trace_async - async def list_keys( - self, resource_group_name: str, account_name: str, expand: Literal["kerb"] = "kerb", **kwargs: Any - ) -> _models.StorageAccountListKeysResult: - """Lists the access keys or Kerberos keys (if active directory enabled) for the specified storage - account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param expand: Specifies type of the key to be listed. Possible value is kerb. Known values are - "kerb" and None. Default value is "kerb". - :type expand: str - :return: StorageAccountListKeysResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageAccountListKeysResult] = kwargs.pop("cls", None) - - _request = build_list_keys_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - expand=expand, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageAccountListKeysResult", pipeline_response.http_response) - - if cls: - return 
cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def regenerate_key( - self, - resource_group_name: str, - account_name: str, - regenerate_key: _models.StorageAccountRegenerateKeyParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageAccountListKeysResult: - """Regenerates one of the access keys or Kerberos keys for the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, - kerb1, kerb2. Required. - :type regenerate_key: ~azure.mgmt.storage.models.StorageAccountRegenerateKeyParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageAccountListKeysResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def regenerate_key( - self, - resource_group_name: str, - account_name: str, - regenerate_key: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageAccountListKeysResult: - """Regenerates one of the access keys or Kerberos keys for the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, - kerb1, kerb2. Required. - :type regenerate_key: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageAccountListKeysResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def regenerate_key( - self, - resource_group_name: str, - account_name: str, - regenerate_key: Union[_models.StorageAccountRegenerateKeyParameters, IO[bytes]], - **kwargs: Any - ) -> _models.StorageAccountListKeysResult: - """Regenerates one of the access keys or Kerberos keys for the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, - kerb1, kerb2. Is either a StorageAccountRegenerateKeyParameters type or a IO[bytes] type. - Required. 
- :type regenerate_key: ~azure.mgmt.storage.models.StorageAccountRegenerateKeyParameters or - IO[bytes] - :return: StorageAccountListKeysResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageAccountListKeysResult] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(regenerate_key, (IOBase, bytes)): - _content = regenerate_key - else: - _json = self._serialize.body(regenerate_key, "StorageAccountRegenerateKeyParameters") - - _request = build_regenerate_key_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = 
self._deserialize("StorageAccountListKeysResult", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def list_account_sas( - self, - resource_group_name: str, - account_name: str, - parameters: _models.AccountSasParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ListAccountSasResponse: - """List SAS credentials of a storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide to list SAS credentials for the storage account. - Required. - :type parameters: ~azure.mgmt.storage.models.AccountSasParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: ListAccountSasResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def list_account_sas( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ListAccountSasResponse: - """List SAS credentials of a storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. 
- Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide to list SAS credentials for the storage account. - Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: ListAccountSasResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def list_account_sas( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.AccountSasParameters, IO[bytes]], - **kwargs: Any - ) -> _models.ListAccountSasResponse: - """List SAS credentials of a storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide to list SAS credentials for the storage account. - Is either a AccountSasParameters type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.AccountSasParameters or IO[bytes] - :return: ListAccountSasResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.ListAccountSasResponse] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "AccountSasParameters") - - _request = build_list_account_sas_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ListAccountSasResponse", pipeline_response.http_response) - - if cls: 
- return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def list_service_sas( - self, - resource_group_name: str, - account_name: str, - parameters: _models.ServiceSasParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ListServiceSasResponse: - """List service SAS credentials of a specific resource. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide to list service SAS credentials. Required. - :type parameters: ~azure.mgmt.storage.models.ServiceSasParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: ListServiceSasResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def list_service_sas( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ListServiceSasResponse: - """List service SAS credentials of a specific resource. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. 
Required. - :type account_name: str - :param parameters: The parameters to provide to list service SAS credentials. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: ListServiceSasResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def list_service_sas( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.ServiceSasParameters, IO[bytes]], - **kwargs: Any - ) -> _models.ListServiceSasResponse: - """List service SAS credentials of a specific resource. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide to list service SAS credentials. Is either a - ServiceSasParameters type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.ServiceSasParameters or IO[bytes] - :return: ListServiceSasResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.ListServiceSasResponse] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "ServiceSasParameters") - - _request = build_list_service_sas_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ListServiceSasResponse", pipeline_response.http_response) - - if cls: 
- return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - async def _failover_initial( - self, resource_group_name: str, account_name: str, failover_type: Literal["Planned"] = "Planned", **kwargs: Any - ) -> AsyncIterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - _request = build_failover_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - failover_type=failover_type, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def begin_failover( - self, resource_group_name: str, 
account_name: str, failover_type: Literal["Planned"] = "Planned", **kwargs: Any - ) -> AsyncLROPoller[None]: - """A failover request can be triggered for a storage account in the event a primary endpoint - becomes unavailable for any reason. The failover occurs from the storage account's primary - cluster to the secondary cluster for RA-GRS accounts. The secondary cluster will become primary - after failover and the account is converted to LRS. In the case of a Planned Failover, the - primary and secondary clusters are swapped after failover and the account remains - geo-replicated. Failover should continue to be used in the event of availability issues as - Planned failover is only available while the primary and secondary endpoints are available. The - primary use case of a Planned Failover is disaster recovery testing drills. This type of - failover is invoked by setting FailoverType parameter to 'Planned'. Learn more about the - failover options here- - https://learn.microsoft.com/azure/storage/common/storage-disaster-recovery-guidance. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param failover_type: The parameter is set to 'Planned' to indicate whether a Planned failover - is requested. Known values are "Planned" and None. Default value is "Planned". 
- :type failover_type: str - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._failover_initial( - resource_group_name=resource_group_name, - account_name=account_name, - failover_type=failover_type, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - async def _hierarchical_namespace_migration_initial( # pylint: disable=name-too-long - self, resource_group_name: str, 
account_name: str, request_type: str, **kwargs: Any - ) -> AsyncIterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - _request = build_hierarchical_namespace_migration_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - request_type=request_type, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def begin_hierarchical_namespace_migration( - self, resource_group_name: str, account_name: str, request_type: str, 
**kwargs: Any - ) -> AsyncLROPoller[None]: - """Live Migration of storage account to enable Hns. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param request_type: Required. Hierarchical namespace migration type can either be a - hierarchical namespace validation request 'HnsOnValidationRequest' or a hydration request - 'HnsOnHydrationRequest'. The validation request will validate the migration whereas the - hydration request will migrate the account. Required. - :type request_type: str - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._hierarchical_namespace_migration_initial( - resource_group_name=resource_group_name, - account_name=account_name, - request_type=request_type, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): # pylint: 
disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - async def _abort_hierarchical_namespace_migration_initial( # pylint: disable=name-too-long - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> AsyncIterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - _request = build_abort_hierarchical_namespace_migration_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - 
if response.status_code not in [200, 202]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def begin_abort_hierarchical_namespace_migration( # pylint: disable=name-too-long - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> AsyncLROPoller[None]: - """Abort live Migration of storage account to enable Hns. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._abort_hierarchical_namespace_migration_initial( - resource_group_name=resource_group_name, - account_name=account_name, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - async def _customer_initiated_migration_initial( - self, - resource_group_name: str, - account_name: str, - parameters: 
Union[_models.StorageAccountMigration, IO[bytes]], - **kwargs: Any - ) -> AsyncIterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "StorageAccountMigration") - - _request = build_customer_initiated_migration_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 202: - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @overload - async def begin_customer_initiated_migration( - self, - resource_group_name: str, - account_name: str, - parameters: _models.StorageAccountMigration, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncLROPoller[None]: - """Account Migration request can be triggered for a storage account to change its redundancy - level. The migration updates the non-zonal redundant storage account to a zonal redundant - account or vice-versa in order to have better reliability and availability. Zone-redundant - storage (ZRS) replicates your storage account synchronously across three Azure availability - zones in the primary region. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The request parameters required to perform storage account migration. - Required. - :type parameters: ~azure.mgmt.storage.models.StorageAccountMigration - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_customer_initiated_migration( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncLROPoller[None]: - """Account Migration request can be triggered for a storage account to change its redundancy - level. The migration updates the non-zonal redundant storage account to a zonal redundant - account or vice-versa in order to have better reliability and availability. Zone-redundant - storage (ZRS) replicates your storage account synchronously across three Azure availability - zones in the primary region. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The request parameters required to perform storage account migration. - Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_customer_initiated_migration( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.StorageAccountMigration, IO[bytes]], - **kwargs: Any - ) -> AsyncLROPoller[None]: - """Account Migration request can be triggered for a storage account to change its redundancy - level. The migration updates the non-zonal redundant storage account to a zonal redundant - account or vice-versa in order to have better reliability and availability. Zone-redundant - storage (ZRS) replicates your storage account synchronously across three Azure availability - zones in the primary region. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The request parameters required to perform storage account migration. Is - either a StorageAccountMigration type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.StorageAccountMigration or IO[bytes] - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._customer_initiated_migration_initial( - resource_group_name=resource_group_name, - account_name=account_name, - parameters=parameters, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return 
AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - @distributed_trace_async - async def get_customer_initiated_migration( - self, - resource_group_name: str, - account_name: str, - migration_name: Union[str, _models.MigrationName], - **kwargs: Any - ) -> _models.StorageAccountMigration: - """Gets the status of the ongoing migration for the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param migration_name: The name of the Storage Account Migration. It should always be - 'default'. "default" Required. - :type migration_name: str or ~azure.mgmt.storage.models.MigrationName - :return: StorageAccountMigration or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccountMigration - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageAccountMigration] = kwargs.pop("cls", None) - - _request = build_get_customer_initiated_migration_request( - resource_group_name=resource_group_name, - account_name=account_name, - migration_name=migration_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - 
params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageAccountMigration", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - async def _restore_blob_ranges_initial( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.BlobRestoreParameters, IO[bytes]], - **kwargs: Any - ) -> AsyncIterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "BlobRestoreParameters") - - _request = build_restore_blob_ranges_request( - resource_group_name=resource_group_name, - 
account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def begin_restore_blob_ranges( - self, - resource_group_name: str, - account_name: str, - parameters: _models.BlobRestoreParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncLROPoller[_models.BlobRestoreStatus]: - """Restore blobs in the specified blob ranges. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for restore blob ranges. Required. 
- :type parameters: ~azure.mgmt.storage.models.BlobRestoreParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AsyncLROPoller that returns either BlobRestoreStatus or the result of - cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_restore_blob_ranges( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncLROPoller[_models.BlobRestoreStatus]: - """Restore blobs in the specified blob ranges. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for restore blob ranges. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: An instance of AsyncLROPoller that returns either BlobRestoreStatus or the result of - cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_restore_blob_ranges( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.BlobRestoreParameters, IO[bytes]], - **kwargs: Any - ) -> AsyncLROPoller[_models.BlobRestoreStatus]: - """Restore blobs in the specified blob ranges. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for restore blob ranges. Is either a - BlobRestoreParameters type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.BlobRestoreParameters or IO[bytes] - :return: An instance of AsyncLROPoller that returns either BlobRestoreStatus or the result of - cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BlobRestoreStatus] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._restore_blob_ranges_initial( - resource_group_name=resource_group_name, - account_name=account_name, - parameters=parameters, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("BlobRestoreStatus", pipeline_response.http_response) - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[_models.BlobRestoreStatus].from_continuation_token( - 
polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[_models.BlobRestoreStatus]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace_async - async def revoke_user_delegation_keys(self, resource_group_name: str, account_name: str, **kwargs: Any) -> None: - """Revoke user delegation keys. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_revoke_user_delegation_keys_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if 
response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_task_assignment_instances_report_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_task_assignment_instances_report_operations.py deleted file mode 100644 index 685c76a32630..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_task_assignment_instances_report_operations.py +++ /dev/null @@ -1,168 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._storage_task_assignment_instances_report_operations import build_list_request -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class StorageTaskAssignmentInstancesReportOperations: # pylint: disable=name-too-long - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`storage_task_assignment_instances_report` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - maxpagesize: Optional[int] = None, - filter: Optional[str] = None, - **kwargs: Any - ) -> AsyncItemPaged["_models.StorageTaskReportInstance"]: - """Fetch the report summary of a single storage task assignment's instances. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param maxpagesize: Optional, specifies the maximum number of storage task assignment instances - to be included in the list response. Default value is None. - :type maxpagesize: int - :param filter: Optional. When specified, it can be used to query using reporting properties. - See `Constructing Filter Strings - `_ - for details. Default value is None. - :type filter: str - :return: An iterator like instance of either StorageTaskReportInstance or the result of - cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.StorageTaskReportInstance] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageTaskReportSummary] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - 
_parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("StorageTaskReportSummary", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_task_assignments_instances_report_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_task_assignments_instances_report_operations.py deleted file mode 100644 index c57d739ce228..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_task_assignments_instances_report_operations.py +++ /dev/null @@ -1,162 +0,0 @@ -# 
pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._storage_task_assignments_instances_report_operations import build_list_request -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class StorageTaskAssignmentsInstancesReportOperations: # pylint: disable=name-too-long - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`storage_task_assignments_instances_report` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - maxpagesize: Optional[int] = None, - filter: Optional[str] = None, - **kwargs: Any - ) -> AsyncItemPaged["_models.StorageTaskReportInstance"]: - """Fetch the report summary of all the storage task assignments and instances in an account. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional, specifies the maximum number of storage task assignment instances - to be included in the list response. Default value is None. - :type maxpagesize: int - :param filter: Optional. When specified, it can be used to query using reporting properties. - See `Constructing Filter Strings - `_ - for details. Default value is None. 
- :type filter: str - :return: An iterator like instance of either StorageTaskReportInstance or the result of - cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.StorageTaskReportInstance] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageTaskReportSummary] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("StorageTaskReportSummary", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - 
return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_task_assignments_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_task_assignments_operations.py deleted file mode 100644 index 0b71a110c7a3..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_storage_task_assignments_operations.py +++ /dev/null @@ -1,829 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, AsyncIterator, Callable, IO, Optional, TypeVar, Union, cast, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._storage_task_assignments_operations import ( - build_create_request, - build_delete_request, - build_get_request, - build_list_request, - build_update_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class StorageTaskAssignmentsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`storage_task_assignments` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - async def _create_initial( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: Union[_models.StorageTaskAssignment, IO[bytes]], - **kwargs: Any - ) -> AsyncIterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "StorageTaskAssignment") - - _request = build_create_request( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - 
_decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 201, 202]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 202: - response_headers["Azure-AsyncOperation"] = self._deserialize( - "str", response.headers.get("Azure-AsyncOperation") - ) - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @overload - async def begin_create( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: _models.StorageTaskAssignment, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncLROPoller[_models.StorageTaskAssignment]: - """Asynchronously creates a new storage task assignment sub-resource with the specified - parameters. If a storage task assignment is already created and a subsequent create request is - issued with different properties, the storage task assignment properties will be updated. If a - storage task assignment is already created and a subsequent create or update request is issued - with the exact same set of properties, the request will succeed. 
- - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param parameters: The parameters to create a Storage Task Assignment. Required. - :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignment - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AsyncLROPoller that returns either StorageTaskAssignment or the result - of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_create( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncLROPoller[_models.StorageTaskAssignment]: - """Asynchronously creates a new storage task assignment sub-resource with the specified - parameters. If a storage task assignment is already created and a subsequent create request is - issued with different properties, the storage task assignment properties will be updated. If a - storage task assignment is already created and a subsequent create or update request is issued - with the exact same set of properties, the request will succeed. 
- - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param parameters: The parameters to create a Storage Task Assignment. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AsyncLROPoller that returns either StorageTaskAssignment or the result - of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_create( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: Union[_models.StorageTaskAssignment, IO[bytes]], - **kwargs: Any - ) -> AsyncLROPoller[_models.StorageTaskAssignment]: - """Asynchronously creates a new storage task assignment sub-resource with the specified - parameters. If a storage task assignment is already created and a subsequent create request is - issued with different properties, the storage task assignment properties will be updated. If a - storage task assignment is already created and a subsequent create or update request is issued - with the exact same set of properties, the request will succeed. 
- - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param parameters: The parameters to create a Storage Task Assignment. Is either a - StorageTaskAssignment type or a IO[bytes] type. Required. - :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignment or IO[bytes] - :return: An instance of AsyncLROPoller that returns either StorageTaskAssignment or the result - of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageTaskAssignment] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._create_initial( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - 
parameters=parameters, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("StorageTaskAssignment", pipeline_response.http_response) - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, - AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs), - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[_models.StorageTaskAssignment].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[_models.StorageTaskAssignment]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - async def _update_initial( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: Union[_models.StorageTaskAssignmentUpdateParameters, IO[bytes]], - **kwargs: Any - ) -> AsyncIterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: 
ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "StorageTaskAssignmentUpdateParameters") - - _request = build_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 202: - response_headers["Azure-AsyncOperation"] = self._deserialize( - "str", response.headers.get("Azure-AsyncOperation") - ) - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @overload - async def begin_update( - self, - 
resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: _models.StorageTaskAssignmentUpdateParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncLROPoller[_models.StorageTaskAssignment]: - """Update storage task assignment properties. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param parameters: The parameters to update a Storage Task Assignment. Required. - :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AsyncLROPoller that returns either StorageTaskAssignment or the result - of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_update( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncLROPoller[_models.StorageTaskAssignment]: - """Update storage task assignment properties. - - :param resource_group_name: The name of the resource group. 
The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param parameters: The parameters to update a Storage Task Assignment. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AsyncLROPoller that returns either StorageTaskAssignment or the result - of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_update( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: Union[_models.StorageTaskAssignmentUpdateParameters, IO[bytes]], - **kwargs: Any - ) -> AsyncLROPoller[_models.StorageTaskAssignment]: - """Update storage task assignment properties. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param parameters: The parameters to update a Storage Task Assignment. Is either a - StorageTaskAssignmentUpdateParameters type or a IO[bytes] type. Required. - :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateParameters or IO[bytes] - :return: An instance of AsyncLROPoller that returns either StorageTaskAssignment or the result - of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageTaskAssignment] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._update_initial( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - parameters=parameters, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = 
self._deserialize("StorageTaskAssignment", pipeline_response.http_response) - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, - AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs), - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[_models.StorageTaskAssignment].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[_models.StorageTaskAssignment]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace_async - async def get( - self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any - ) -> _models.StorageTaskAssignment: - """Get the storage task assignment properties. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. 
- :type storage_task_assignment_name: str - :return: StorageTaskAssignment or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageTaskAssignment - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageTaskAssignment] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageTaskAssignment", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - async def _delete_initial( - self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any - ) -> AsyncIterator[bytes]: - error_map: 
MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202, 204]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 202: - response_headers["Azure-AsyncOperation"] = self._deserialize( - "str", response.headers.get("Azure-AsyncOperation") - ) - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # 
type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def begin_delete( - self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any - ) -> AsyncLROPoller[None]: - """Delete the storage task assignment sub-resource. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._delete_initial( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() 
# type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, - AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs), - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - @distributed_trace - def list( - self, resource_group_name: str, account_name: str, top: Optional[int] = None, **kwargs: Any - ) -> AsyncItemPaged["_models.StorageTaskAssignment"]: - """List all the storage task assignments in an account. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param top: Optional, specifies the maximum number of storage task assignment Ids to be - included in the list response. Default value is None. 
- :type top: int - :return: An iterator like instance of either StorageTaskAssignment or the result of - cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageTaskAssignmentsList] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - top=top, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("StorageTaskAssignmentsList", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, 
AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_table_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_table_operations.py deleted file mode 100644 index 6575ade47a15..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_table_operations.py +++ /dev/null @@ -1,578 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._table_operations import ( - build_create_request, - build_delete_request, - build_get_request, - build_list_request, - build_update_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class TableOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`table` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - async def create( - self, - resource_group_name: str, - account_name: str, - table_name: str, - parameters: Optional[_models.Table] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.Table: - """Creates a new table with the specified table name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :param parameters: The parameters to provide to create a table. Default value is None. - :type parameters: ~azure.mgmt.storage.models.Table - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def create( - self, - resource_group_name: str, - account_name: str, - table_name: str, - parameters: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.Table: - """Creates a new table with the specified table name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :param parameters: The parameters to provide to create a table. Default value is None. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def create( - self, - resource_group_name: str, - account_name: str, - table_name: str, - parameters: Optional[Union[_models.Table, IO[bytes]]] = None, - **kwargs: Any - ) -> _models.Table: - """Creates a new table with the specified table name, under the specified account. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :param parameters: The parameters to provide to create a table. Is either a Table type or a - IO[bytes] type. Default value is None. - :type parameters: ~azure.mgmt.storage.models.Table or IO[bytes] - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - content_type = content_type if parameters else None - cls: ClsType[_models.Table] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if parameters else None - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - if parameters is not None: - _json = self._serialize.body(parameters, "Table") - else: - _json = None - - _request = 
build_create_request( - resource_group_name=resource_group_name, - account_name=account_name, - table_name=table_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("Table", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def update( - self, - resource_group_name: str, - account_name: str, - table_name: str, - parameters: Optional[_models.Table] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.Table: - """Creates a new table with the specified table name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. 
- :type table_name: str - :param parameters: The parameters to provide to create a table. Default value is None. - :type parameters: ~azure.mgmt.storage.models.Table - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def update( - self, - resource_group_name: str, - account_name: str, - table_name: str, - parameters: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.Table: - """Creates a new table with the specified table name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :param parameters: The parameters to provide to create a table. Default value is None. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def update( - self, - resource_group_name: str, - account_name: str, - table_name: str, - parameters: Optional[Union[_models.Table, IO[bytes]]] = None, - **kwargs: Any - ) -> _models.Table: - """Creates a new table with the specified table name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :param parameters: The parameters to provide to create a table. Is either a Table type or a - IO[bytes] type. Default value is None. 
- :type parameters: ~azure.mgmt.storage.models.Table or IO[bytes] - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - content_type = content_type if parameters else None - cls: ClsType[_models.Table] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if parameters else None - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - if parameters is not None: - _json = self._serialize.body(parameters, "Table") - else: - _json = None - - _request = build_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - table_name=table_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = 
self._deserialize("Table", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get(self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any) -> _models.Table: - """Gets the table with the specified table name, under the specified account if it exists. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. 
- :type table_name: str - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.Table] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - table_name=table_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("Table", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def delete(self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any) -> None: - """Deletes the table with the specified table name, under the specified account if it exists. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - table_name=table_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @distributed_trace - def list(self, resource_group_name: str, 
account_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Table"]: - """Gets a list of all the tables under the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: An iterator like instance of either Table or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.Table] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListTableResource] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - 
_request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("ListTableResource", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_table_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_table_services_operations.py deleted file mode 100644 index 24cf29afc2fe..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_table_services_operations.py +++ /dev/null @@ -1,321 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Literal, Optional, TypeVar, Union, overload - -from azure.core import AsyncPipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._table_services_operations import ( - build_get_service_properties_request, - build_list_request, - build_set_service_properties_request, -) -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class TableServicesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`table_services` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.ListTableServices: - """List all table services for the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: ListTableServices or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListTableServices - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListTableServices] = kwargs.pop("cls", None) - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ListTableServices", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - async def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: _models.TableServiceProperties, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.TableServiceProperties: - """Sets the properties of a storage account’s Table service, including properties for Storage - Analytics and CORS (Cross-Origin Resource 
Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Table service, only properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. - :type parameters: ~azure.mgmt.storage.models.TableServiceProperties - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: TableServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.TableServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.TableServiceProperties: - """Sets the properties of a storage account’s Table service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param parameters: The properties of a storage account’s Table service, only properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: TableServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.TableServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.TableServiceProperties, IO[bytes]], - **kwargs: Any - ) -> _models.TableServiceProperties: - """Sets the properties of a storage account’s Table service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Table service, only properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Is either a - TableServiceProperties type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.TableServiceProperties or IO[bytes] - :return: TableServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.TableServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - table_service_name: Literal["default"] = kwargs.pop("table_service_name", "default") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.TableServiceProperties] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "TableServiceProperties") - - _request = build_set_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - table_service_name=table_service_name, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) - - deserialized = self._deserialize("TableServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_service_properties( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> _models.TableServiceProperties: - """Gets the properties of a storage account’s Table service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: TableServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.TableServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - table_service_name: Literal["default"] = kwargs.pop("table_service_name", "default") - cls: ClsType[_models.TableServiceProperties] = kwargs.pop("cls", None) - - _request = build_get_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - table_service_name=table_service_name, - 
headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("TableServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_usages_operations.py deleted file mode 100644 index 804c7165d2f9..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/operations/_usages_operations.py +++ /dev/null @@ -1,133 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar -import urllib.parse - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._usages_operations import build_list_by_location_request -from .._configuration import StorageManagementClientConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list - - -class UsagesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.aio.StorageManagementClient`'s - :attr:`usages` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list_by_location(self, location: str, **kwargs: Any) -> AsyncItemPaged["_models.Usage"]: - """Gets the current usage count and the limit for the resources of the location under the - subscription. - - :param location: The location of the Azure Storage resource. Required. - :type location: str - :return: An iterator like instance of either Usage or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.models.Usage] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.UsageListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_by_location_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = 
case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("UsageListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/__init__.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/__init__.py index 9625287641ac..5c7689748e16 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/__init__.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/__init__.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -13,7 +13,7 @@ from ._patch import * # pylint: disable=unused-wildcard-import -from ._models_py3 import ( # type: ignore +from ._models import ( # type: ignore AccessPolicy, AccountImmutabilityPolicyProperties, AccountLimits, @@ -28,25 +28,32 @@ BlobInventoryPolicy, BlobInventoryPolicyDefinition, BlobInventoryPolicyFilter, + BlobInventoryPolicyProperties, BlobInventoryPolicyRule, BlobInventoryPolicySchema, BlobRestoreParameters, BlobRestoreRange, BlobRestoreStatus, - BlobServiceItems, BlobServiceProperties, + BlobServicePropertiesProperties, BurstingConstants, ChangeFeed, CheckNameAvailabilityResult, + CloudError, CloudErrorBody, + Connector, + ContainerProperties, CorsRule, CorsRules, CustomDomain, + DataShare, + DataShareConnection, + DataShareSource, DateAfterCreation, DateAfterModification, DeleteRetentionPolicy, DeletedAccount, - DeletedAccountListResult, + DeletedAccountProperties, DeletedShare, Dimension, DualStackEndpointPreference, @@ -55,7 +62,7 @@ EncryptionInTransit, EncryptionScope, EncryptionScopeKeyVaultProperties, - EncryptionScopeListResult, + EncryptionScopeProperties, EncryptionService, EncryptionServices, Endpoints, @@ -70,13 +77,13 @@ ExtendedLocation, FileServiceItems, FileServiceProperties, + FileServicePropertiesProperties, FileServiceUsage, FileServiceUsageProperties, - FileServiceUsages, FileShare, FileShareItem, - FileShareItems, FileShareLimits, + FileShareProperties, FileSharePropertiesFileSharePaidBursting, FileShareRecommendations, GeoPriorityReplicationStatus, @@ -85,6 +92,7 @@ Identity, ImmutabilityPolicy, ImmutabilityPolicyProperties, + ImmutabilityPolicyProperty, ImmutableStorageAccount, ImmutableStorageWithVersioning, KeyCreationTime, @@ -98,24 +106,23 @@ LegalHold, LegalHoldProperties, ListAccountSasResponse, - 
ListBlobInventoryPolicy, ListContainerItem, - ListContainerItems, ListQueue, - ListQueueResource, + ListQueueProperties, ListQueueServices, ListServiceSasResponse, - ListTableResource, ListTableServices, LocalUser, LocalUserKeys, + LocalUserProperties, LocalUserRegeneratePasswordResult, - LocalUsers, + ManagedIdentityAuthProperties, ManagementPolicy, ManagementPolicyAction, ManagementPolicyBaseBlob, ManagementPolicyDefinition, ManagementPolicyFilter, + ManagementPolicyProperties, ManagementPolicyRule, ManagementPolicySchema, ManagementPolicySnapShot, @@ -125,38 +132,40 @@ NetworkRuleSet, NetworkSecurityPerimeter, NetworkSecurityPerimeterConfiguration, - NetworkSecurityPerimeterConfigurationList, + NetworkSecurityPerimeterConfigurationProperties, NetworkSecurityPerimeterConfigurationPropertiesProfile, NetworkSecurityPerimeterConfigurationPropertiesResourceAssociation, NfsSetting, NspAccessRule, NspAccessRuleProperties, NspAccessRulePropertiesSubscriptionsItem, - ObjectReplicationPolicies, ObjectReplicationPolicy, ObjectReplicationPolicyFilter, + ObjectReplicationPolicyProperties, ObjectReplicationPolicyPropertiesMetrics, ObjectReplicationPolicyPropertiesPriorityReplication, + ObjectReplicationPolicyPropertiesTagsReplication, ObjectReplicationPolicyRule, Operation, OperationDisplay, - OperationListResult, + OperationProperties, PermissionScope, Placement, PrivateEndpoint, PrivateEndpointConnection, - PrivateEndpointConnectionListResult, + PrivateEndpointConnectionProperties, PrivateLinkResource, PrivateLinkResourceListResult, + PrivateLinkResourceProperties, PrivateLinkServiceConnectionState, ProtectedAppendWritesHistory, ProtocolSettings, ProvisioningIssue, ProvisioningIssueProperties, ProxyResource, - ProxyResourceAutoGenerated, + QueueProperties, QueueServiceProperties, - Resource, + QueueServicePropertiesProperties, ResourceAccessRule, ResourceAutoGenerated, RestorePolicyProperties, @@ -165,6 +174,7 @@ SKUCapability, SasPolicy, ServiceSasParameters, + 
ServiceSharedKeyAccessProperties, ServiceSpecification, SignedIdentifier, Sku, @@ -173,6 +183,7 @@ SmbOAuthSettings, SmbSetting, SshPublicKey, + StaticWebsite, StorageAccount, StorageAccountCheckNameAvailabilityParameters, StorageAccountCreateParameters, @@ -180,14 +191,25 @@ StorageAccountIpv6Endpoints, StorageAccountKey, StorageAccountListKeysResult, - StorageAccountListResult, StorageAccountMicrosoftEndpoints, StorageAccountMigration, + StorageAccountMigrationProperties, + StorageAccountProperties, + StorageAccountPropertiesCreateParameters, + StorageAccountPropertiesUpdateParameters, StorageAccountRegenerateKeyParameters, + StorageAccountSharedKeyAccessProperties, StorageAccountSkuConversionStatus, StorageAccountUpdateParameters, + StorageConnectorAuthProperties, + StorageConnectorConnection, + StorageConnectorProperties, + StorageConnectorSource, + StorageDataCollaborationPolicyProperties, + StorageDataShareAccessPolicy, + StorageDataShareAsset, + StorageDataShareProperties, StorageQueue, - StorageSkuListResult, StorageTaskAssignment, StorageTaskAssignmentExecutionContext, StorageTaskAssignmentProperties, @@ -196,29 +218,30 @@ StorageTaskAssignmentUpdateParameters, StorageTaskAssignmentUpdateProperties, StorageTaskAssignmentUpdateReport, - StorageTaskAssignmentsList, StorageTaskReportInstance, StorageTaskReportProperties, - StorageTaskReportSummary, SystemData, Table, TableAccessPolicy, + TableProperties, TableServiceProperties, + TableServicePropertiesProperties, TableSignedIdentifier, TagFilter, TagProperty, + TestConnectionResponse, + TestExistingConnectionRequest, TrackedResource, TriggerParameters, TriggerParametersUpdate, UpdateHistoryProperty, Usage, - UsageListResult, UsageName, UserAssignedIdentity, VirtualNetworkRule, ) -from ._storage_management_client_enums import ( # type: ignore +from ._enums import ( # type: ignore AccessTier, AccountImmutabilityPolicyState, AccountStatus, @@ -252,7 +275,7 @@ KeyType, Kind, LargeFileSharesState, - 
LeaseContainerRequestEnum, + LeaseContainerRequestAction, LeaseDuration, LeaseShareAction, LeaseState, @@ -266,6 +289,7 @@ MigrationStatus, MinimumTlsVersion, Name, + NativeDataSharingProvisioningState, NetworkSecurityPerimeterConfigurationProvisioningState, NspAccessRuleDirection, ObjectType, @@ -296,6 +320,13 @@ SkuTier, State, StorageAccountExpand, + StorageConnectorAuthType, + StorageConnectorConnectionType, + StorageConnectorDataSourceType, + StorageConnectorSourceType, + StorageConnectorState, + StorageDataShareAccessPolicyPermission, + StorageTaskAssignmentProvisioningState, TriggerType, UsageUnit, ZonePlacementPolicy, @@ -319,25 +350,32 @@ "BlobInventoryPolicy", "BlobInventoryPolicyDefinition", "BlobInventoryPolicyFilter", + "BlobInventoryPolicyProperties", "BlobInventoryPolicyRule", "BlobInventoryPolicySchema", "BlobRestoreParameters", "BlobRestoreRange", "BlobRestoreStatus", - "BlobServiceItems", "BlobServiceProperties", + "BlobServicePropertiesProperties", "BurstingConstants", "ChangeFeed", "CheckNameAvailabilityResult", + "CloudError", "CloudErrorBody", + "Connector", + "ContainerProperties", "CorsRule", "CorsRules", "CustomDomain", + "DataShare", + "DataShareConnection", + "DataShareSource", "DateAfterCreation", "DateAfterModification", "DeleteRetentionPolicy", "DeletedAccount", - "DeletedAccountListResult", + "DeletedAccountProperties", "DeletedShare", "Dimension", "DualStackEndpointPreference", @@ -346,7 +384,7 @@ "EncryptionInTransit", "EncryptionScope", "EncryptionScopeKeyVaultProperties", - "EncryptionScopeListResult", + "EncryptionScopeProperties", "EncryptionService", "EncryptionServices", "Endpoints", @@ -361,13 +399,13 @@ "ExtendedLocation", "FileServiceItems", "FileServiceProperties", + "FileServicePropertiesProperties", "FileServiceUsage", "FileServiceUsageProperties", - "FileServiceUsages", "FileShare", "FileShareItem", - "FileShareItems", "FileShareLimits", + "FileShareProperties", "FileSharePropertiesFileSharePaidBursting", 
"FileShareRecommendations", "GeoPriorityReplicationStatus", @@ -376,6 +414,7 @@ "Identity", "ImmutabilityPolicy", "ImmutabilityPolicyProperties", + "ImmutabilityPolicyProperty", "ImmutableStorageAccount", "ImmutableStorageWithVersioning", "KeyCreationTime", @@ -389,24 +428,23 @@ "LegalHold", "LegalHoldProperties", "ListAccountSasResponse", - "ListBlobInventoryPolicy", "ListContainerItem", - "ListContainerItems", "ListQueue", - "ListQueueResource", + "ListQueueProperties", "ListQueueServices", "ListServiceSasResponse", - "ListTableResource", "ListTableServices", "LocalUser", "LocalUserKeys", + "LocalUserProperties", "LocalUserRegeneratePasswordResult", - "LocalUsers", + "ManagedIdentityAuthProperties", "ManagementPolicy", "ManagementPolicyAction", "ManagementPolicyBaseBlob", "ManagementPolicyDefinition", "ManagementPolicyFilter", + "ManagementPolicyProperties", "ManagementPolicyRule", "ManagementPolicySchema", "ManagementPolicySnapShot", @@ -416,38 +454,40 @@ "NetworkRuleSet", "NetworkSecurityPerimeter", "NetworkSecurityPerimeterConfiguration", - "NetworkSecurityPerimeterConfigurationList", + "NetworkSecurityPerimeterConfigurationProperties", "NetworkSecurityPerimeterConfigurationPropertiesProfile", "NetworkSecurityPerimeterConfigurationPropertiesResourceAssociation", "NfsSetting", "NspAccessRule", "NspAccessRuleProperties", "NspAccessRulePropertiesSubscriptionsItem", - "ObjectReplicationPolicies", "ObjectReplicationPolicy", "ObjectReplicationPolicyFilter", + "ObjectReplicationPolicyProperties", "ObjectReplicationPolicyPropertiesMetrics", "ObjectReplicationPolicyPropertiesPriorityReplication", + "ObjectReplicationPolicyPropertiesTagsReplication", "ObjectReplicationPolicyRule", "Operation", "OperationDisplay", - "OperationListResult", + "OperationProperties", "PermissionScope", "Placement", "PrivateEndpoint", "PrivateEndpointConnection", - "PrivateEndpointConnectionListResult", + "PrivateEndpointConnectionProperties", "PrivateLinkResource", 
"PrivateLinkResourceListResult", + "PrivateLinkResourceProperties", "PrivateLinkServiceConnectionState", "ProtectedAppendWritesHistory", "ProtocolSettings", "ProvisioningIssue", "ProvisioningIssueProperties", "ProxyResource", - "ProxyResourceAutoGenerated", + "QueueProperties", "QueueServiceProperties", - "Resource", + "QueueServicePropertiesProperties", "ResourceAccessRule", "ResourceAutoGenerated", "RestorePolicyProperties", @@ -456,6 +496,7 @@ "SKUCapability", "SasPolicy", "ServiceSasParameters", + "ServiceSharedKeyAccessProperties", "ServiceSpecification", "SignedIdentifier", "Sku", @@ -464,6 +505,7 @@ "SmbOAuthSettings", "SmbSetting", "SshPublicKey", + "StaticWebsite", "StorageAccount", "StorageAccountCheckNameAvailabilityParameters", "StorageAccountCreateParameters", @@ -471,14 +513,25 @@ "StorageAccountIpv6Endpoints", "StorageAccountKey", "StorageAccountListKeysResult", - "StorageAccountListResult", "StorageAccountMicrosoftEndpoints", "StorageAccountMigration", + "StorageAccountMigrationProperties", + "StorageAccountProperties", + "StorageAccountPropertiesCreateParameters", + "StorageAccountPropertiesUpdateParameters", "StorageAccountRegenerateKeyParameters", + "StorageAccountSharedKeyAccessProperties", "StorageAccountSkuConversionStatus", "StorageAccountUpdateParameters", + "StorageConnectorAuthProperties", + "StorageConnectorConnection", + "StorageConnectorProperties", + "StorageConnectorSource", + "StorageDataCollaborationPolicyProperties", + "StorageDataShareAccessPolicy", + "StorageDataShareAsset", + "StorageDataShareProperties", "StorageQueue", - "StorageSkuListResult", "StorageTaskAssignment", "StorageTaskAssignmentExecutionContext", "StorageTaskAssignmentProperties", @@ -487,23 +540,24 @@ "StorageTaskAssignmentUpdateParameters", "StorageTaskAssignmentUpdateProperties", "StorageTaskAssignmentUpdateReport", - "StorageTaskAssignmentsList", "StorageTaskReportInstance", "StorageTaskReportProperties", - "StorageTaskReportSummary", "SystemData", "Table", 
"TableAccessPolicy", + "TableProperties", "TableServiceProperties", + "TableServicePropertiesProperties", "TableSignedIdentifier", "TagFilter", "TagProperty", + "TestConnectionResponse", + "TestExistingConnectionRequest", "TrackedResource", "TriggerParameters", "TriggerParametersUpdate", "UpdateHistoryProperty", "Usage", - "UsageListResult", "UsageName", "UserAssignedIdentity", "VirtualNetworkRule", @@ -540,7 +594,7 @@ "KeyType", "Kind", "LargeFileSharesState", - "LeaseContainerRequestEnum", + "LeaseContainerRequestAction", "LeaseDuration", "LeaseShareAction", "LeaseState", @@ -554,6 +608,7 @@ "MigrationStatus", "MinimumTlsVersion", "Name", + "NativeDataSharingProvisioningState", "NetworkSecurityPerimeterConfigurationProvisioningState", "NspAccessRuleDirection", "ObjectType", @@ -584,6 +639,13 @@ "SkuTier", "State", "StorageAccountExpand", + "StorageConnectorAuthType", + "StorageConnectorConnectionType", + "StorageConnectorDataSourceType", + "StorageConnectorSourceType", + "StorageConnectorState", + "StorageDataShareAccessPolicyPermission", + "StorageTaskAssignmentProvisioningState", "TriggerType", "UsageUnit", "ZonePlacementPolicy", diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_storage_management_client_enums.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_enums.py similarity index 71% rename from sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_storage_management_client_enums.py rename to sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_enums.py index f512dda85739..00f33a0a6db9 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_storage_management_client_enums.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_enums.py @@ -1,8 +1,9 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -11,15 +12,22 @@ class AccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Required for storage accounts where kind = BlobStorage. The access tier is used for billing. - The 'Premium' access tier is the default value for premium block blobs storage account type and - it cannot be changed for the premium block blobs storage account type. + """The default access tier for block blobs in the storage account. Required for storage accounts + where kind = BlobStorage. See more details in: + `https://learn.microsoft.com/azure/storage/blobs/access-tiers-overview + `_. """ HOT = "Hot" + """HOT.""" COOL = "Cool" + """COOL.""" PREMIUM = "Premium" + """PREMIUM.""" COLD = "Cold" + """COLD.""" + SMART = "Smart" + """SMART.""" class AccountImmutabilityPolicyState(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -32,8 +40,11 @@ class AccountImmutabilityPolicyState(str, Enum, metaclass=CaseInsensitiveEnumMet """ UNLOCKED = "Unlocked" + """UNLOCKED.""" LOCKED = "Locked" + """LOCKED.""" DISABLED = "Disabled" + """DISABLED.""" class AccountStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -42,7 +53,9 @@ class AccountStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ AVAILABLE = "available" + """AVAILABLE.""" UNAVAILABLE = "unavailable" + """UNAVAILABLE.""" class AccountType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -53,7 +66,9 @@ class AccountType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ USER = "User" + """USER.""" COMPUTER = "Computer" + """COMPUTER.""" class AllowedCopyScope(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -62,28 +77,43 @@ class AllowedCopyScope(str, Enum, 
metaclass=CaseInsensitiveEnumMeta): """ PRIVATE_LINK = "PrivateLink" + """PRIVATE_LINK.""" AAD = "AAD" + """AAD.""" + ALL = "All" + """ALL.""" class AllowedMethods(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """AllowedMethods.""" + """Type of AllowedMethods.""" DELETE = "DELETE" + """DELETE.""" GET = "GET" + """GET.""" HEAD = "HEAD" + """HEAD.""" MERGE = "MERGE" + """MERGE.""" POST = "POST" + """POST.""" OPTIONS = "OPTIONS" + """OPTIONS.""" PUT = "PUT" + """PUT.""" PATCH = "PATCH" + """PATCH.""" CONNECT = "CONNECT" + """CONNECT.""" TRACE = "TRACE" + """TRACE.""" class BlobInventoryPolicyName(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """BlobInventoryPolicyName.""" + """Type of BlobInventoryPolicyName.""" DEFAULT = "default" + """DEFAULT.""" class BlobRestoreProgressStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -93,8 +123,11 @@ class BlobRestoreProgressStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ IN_PROGRESS = "InProgress" + """IN_PROGRESS.""" COMPLETE = "Complete" + """COMPLETE.""" FAILED = "Failed" + """FAILED.""" class Bypass(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -104,43 +137,61 @@ class Bypass(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ NONE = "None" + """NONE.""" LOGGING = "Logging" + """LOGGING.""" METRICS = "Metrics" + """METRICS.""" AZURE_SERVICES = "AzureServices" + """AZURE_SERVICES.""" class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of identity that created the resource.""" + """The kind of entity that created the resource.""" USER = "User" + """The entity was created by a user.""" APPLICATION = "Application" + """The entity was created by an application.""" MANAGED_IDENTITY = "ManagedIdentity" + """The entity was created by a managed identity.""" KEY = "Key" + """The entity was created by a key.""" class DefaultAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Specifies the default action of allow or deny when no other rules match.""" ALLOW = "Allow" + 
"""ALLOW.""" DENY = "Deny" + """DENY.""" class DefaultSharePermission(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Default share permission for users using Kerberos authentication if RBAC role is not assigned.""" NONE = "None" + """NONE.""" STORAGE_FILE_DATA_SMB_SHARE_READER = "StorageFileDataSmbShareReader" + """STORAGE_FILE_DATA_SMB_SHARE_READER.""" STORAGE_FILE_DATA_SMB_SHARE_CONTRIBUTOR = "StorageFileDataSmbShareContributor" + """STORAGE_FILE_DATA_SMB_SHARE_CONTRIBUTOR.""" STORAGE_FILE_DATA_SMB_SHARE_ELEVATED_CONTRIBUTOR = "StorageFileDataSmbShareElevatedContributor" + """STORAGE_FILE_DATA_SMB_SHARE_ELEVATED_CONTRIBUTOR.""" class DirectoryServiceOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Indicates the directory service used. Note that this enum may be extended in the future.""" NONE = "None" + """NONE.""" AADDS = "AADDS" + """AADDS.""" AD = "AD" + """AD.""" AADKERB = "AADKERB" + """AADKERB.""" class DnsEndpointType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -150,7 +201,9 @@ class DnsEndpointType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ STANDARD = "Standard" + """STANDARD.""" AZURE_DNS_ZONE = "AzureDnsZone" + """AZURE_DNS_ZONE.""" class EnabledProtocols(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -159,7 +212,9 @@ class EnabledProtocols(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ SMB = "SMB" + """SMB.""" NFS = "NFS" + """NFS.""" class EncryptionScopeSource(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -168,14 +223,18 @@ class EncryptionScopeSource(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ MICROSOFT_STORAGE = "Microsoft.Storage" + """MICROSOFT_STORAGE.""" MICROSOFT_KEY_VAULT = "Microsoft.KeyVault" + """MICROSOFT_KEY_VAULT.""" class EncryptionScopeState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The state of the encryption scope. 
Possible values (case-insensitive): Enabled, Disabled.""" ENABLED = "Enabled" + """ENABLED.""" DISABLED = "Disabled" + """DISABLED.""" class ExpirationAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -186,20 +245,25 @@ class ExpirationAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ LOG = "Log" + """LOG.""" BLOCK = "Block" + """BLOCK.""" class ExtendedLocationTypes(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The type of extendedLocation.""" EDGE_ZONE = "EdgeZone" + """EDGE_ZONE.""" class Format(str, Enum, metaclass=CaseInsensitiveEnumMeta): """This is a required field, it specifies the format for the inventory files.""" CSV = "Csv" + """CSV.""" PARQUET = "Parquet" + """PARQUET.""" class GeoReplicationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -211,31 +275,42 @@ class GeoReplicationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ LIVE = "Live" + """LIVE.""" BOOTSTRAP = "Bootstrap" + """BOOTSTRAP.""" UNAVAILABLE = "Unavailable" + """UNAVAILABLE.""" class HttpProtocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The protocol permitted for a request made with the account SAS.""" HTTPS_HTTP = "https,http" + """HTTPS_HTTP.""" HTTPS = "https" + """HTTPS.""" class IdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The identity type.""" NONE = "None" + """NONE.""" SYSTEM_ASSIGNED = "SystemAssigned" + """SYSTEM_ASSIGNED.""" USER_ASSIGNED = "UserAssigned" + """USER_ASSIGNED.""" SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" + """SYSTEM_ASSIGNED_USER_ASSIGNED.""" class ImmutabilityPolicyState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The ImmutabilityPolicy state of a blob container, possible values include: Locked and Unlocked.""" LOCKED = "Locked" + """LOCKED.""" UNLOCKED = "Unlocked" + """UNLOCKED.""" class ImmutabilityPolicyUpdateType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -244,8 +319,11 @@ class ImmutabilityPolicyUpdateType(str, Enum, metaclass=CaseInsensitiveEnumMeta) """ PUT 
= "put" + """PUT.""" LOCK = "lock" + """LOCK.""" EXTEND = "extend" + """EXTEND.""" class IntervalUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -255,26 +333,32 @@ class IntervalUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ DAYS = "Days" + """DAYS.""" class InventoryRuleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The valid value is Inventory.""" INVENTORY = "Inventory" + """INVENTORY.""" class IssueType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of issue.""" UNKNOWN = "Unknown" + """UNKNOWN.""" CONFIGURATION_PROPAGATION_FAILURE = "ConfigurationPropagationFailure" + """CONFIGURATION_PROPAGATION_FAILURE.""" class KeyPermission(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Permissions for the key -- read-only or full permissions.""" READ = "Read" + """READ.""" FULL = "Full" + """FULL.""" class KeySource(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -283,7 +367,9 @@ class KeySource(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ MICROSOFT_STORAGE = "Microsoft.Storage" + """MICROSOFT_STORAGE.""" MICROSOFT_KEYVAULT = "Microsoft.Keyvault" + """MICROSOFT_KEYVAULT.""" class KeyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -293,34 +379,48 @@ class KeyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ SERVICE = "Service" + """SERVICE.""" ACCOUNT = "Account" + """ACCOUNT.""" class Kind(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Indicates the type of storage account.""" STORAGE = "Storage" + """STORAGE.""" STORAGE_V2 = "StorageV2" + """STORAGE_V2.""" BLOB_STORAGE = "BlobStorage" + """BLOB_STORAGE.""" FILE_STORAGE = "FileStorage" + """FILE_STORAGE.""" BLOCK_BLOB_STORAGE = "BlockBlobStorage" + """BLOCK_BLOB_STORAGE.""" class LargeFileSharesState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Allow large file shares if sets to Enabled. 
It cannot be disabled once it is enabled.""" DISABLED = "Disabled" + """DISABLED.""" ENABLED = "Enabled" + """ENABLED.""" -class LeaseContainerRequestEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): +class LeaseContainerRequestAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Specifies the lease action. Can be one of the available actions.""" ACQUIRE = "Acquire" + """ACQUIRE.""" RENEW = "Renew" + """RENEW.""" CHANGE = "Change" + """CHANGE.""" RELEASE = "Release" + """RELEASE.""" BREAK = "Break" + """BREAK.""" class LeaseDuration(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -329,66 +429,87 @@ class LeaseDuration(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ INFINITE = "Infinite" + """INFINITE.""" FIXED = "Fixed" + """FIXED.""" class LeaseShareAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Specifies the lease action. Can be one of the available actions.""" ACQUIRE = "Acquire" + """ACQUIRE.""" RENEW = "Renew" + """RENEW.""" CHANGE = "Change" + """CHANGE.""" RELEASE = "Release" + """RELEASE.""" BREAK = "Break" + """BREAK.""" class LeaseState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Lease state of the container.""" AVAILABLE = "Available" + """AVAILABLE.""" LEASED = "Leased" + """LEASED.""" EXPIRED = "Expired" + """EXPIRED.""" BREAKING = "Breaking" + """BREAKING.""" BROKEN = "Broken" + """BROKEN.""" class LeaseStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The lease status of the container.""" LOCKED = "Locked" + """LOCKED.""" UNLOCKED = "Unlocked" + """UNLOCKED.""" class ListContainersInclude(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ListContainersInclude.""" + """Type of ListContainersInclude.""" DELETED = "deleted" + """DELETED.""" class ListEncryptionScopesInclude(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ListEncryptionScopesInclude.""" + """Type of ListEncryptionScopesInclude.""" ALL = "All" + """ALL.""" ENABLED = "Enabled" + """ENABLED.""" DISABLED = "Disabled" + """DISABLED.""" class 
ListLocalUserIncludeParam(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ListLocalUserIncludeParam.""" + """Type of ListLocalUserIncludeParam.""" NFSV3 = "nfsv3" + """NFSV3.""" class ManagementPolicyName(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ManagementPolicyName.""" + """Type of ManagementPolicyName.""" DEFAULT = "default" + """DEFAULT.""" class MigrationName(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """MigrationName.""" + """Type of MigrationName.""" DEFAULT = "default" + """DEFAULT.""" class MigrationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -397,34 +518,63 @@ class MigrationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ IN_PROGRESS = "InProgress" + """IN_PROGRESS.""" COMPLETED = "Completed" + """COMPLETED.""" class MigrationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Current status of migration.""" INVALID = "Invalid" + """INVALID.""" SUBMITTED_FOR_CONVERSION = "SubmittedForConversion" + """SUBMITTED_FOR_CONVERSION.""" IN_PROGRESS = "InProgress" + """IN_PROGRESS.""" COMPLETE = "Complete" + """COMPLETE.""" FAILED = "Failed" + """FAILED.""" class MinimumTlsVersion(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Set the minimum TLS version to be permitted on requests to storage. The default interpretation - is TLS 1.0 for this property. + is TLS 1.0 for this property. Minimum TLS version 1.3 version is not supported. """ TLS1_0 = "TLS1_0" + """TLS1_0.""" TLS1_1 = "TLS1_1" + """TLS1_1.""" TLS1_2 = "TLS1_2" + """TLS1_2.""" TLS1_3 = "TLS1_3" + """TLS1_3.""" class Name(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Name of the policy. The valid value is AccessTimeTracking. 
class NativeDataSharingProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Provisioning state of the resource at the time the operation was called."""

    ACCEPTED = "Accepted"
    """The request has been accepted for processing."""
    CREATING = "Creating"
    """The resource is being created."""
    SUCCEEDED = "Succeeded"
    """The resource has been successfully created."""
    DELETING = "Deleting"
    """The resource is being deleted."""
    CANCELED = "Canceled"
    """The request has been canceled."""
    FAILED = "Failed"
    """The resource creation or deletion has failed."""


class NetworkSecurityPerimeterConfigurationProvisioningState(  # pylint: disable=name-too-long
    str, Enum, metaclass=CaseInsensitiveEnumMeta
):
    """Provisioning state of Network Security Perimeter configuration propagation."""

    ACCEPTED = "Accepted"
    SUCCEEDED = "Succeeded"
    FAILED = "Failed"
    DELETING = "Deleting"
    CANCELED = "Canceled"


class NspAccessRuleDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Direction of Access Rule."""

    INBOUND = "Inbound"
    OUTBOUND = "Outbound"


# NOTE(review): the opening line of this docstring is cut off by a hunk header in
# the source patch -- wording reconstructed from the member set; confirm.
class ObjectType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The storage object type the rule applies to: Blob or Container."""

    BLOB = "Blob"
    CONTAINER = "Container"


class Permissions(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The signed permissions: Read (r), Delete (d), Write (w), List (l), Add (a),
    Create (c), Update (u) and Process (p).
    """

    R = "r"
    D = "d"
    W = "w"
    L = "l"
    A = "a"
    C = "c"
    U = "u"
    P = "p"


class PostFailoverRedundancy(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The redundancy type of the account after an account failover is performed."""

    STANDARD_LRS = "Standard_LRS"
    STANDARD_ZRS = "Standard_ZRS"


class PostPlannedFailoverRedundancy(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The redundancy type of the account after a planned account failover is performed."""

    STANDARD_GRS = "Standard_GRS"
    STANDARD_GZRS = "Standard_GZRS"
    STANDARD_RAGRS = "Standard_RAGRS"
    STANDARD_RAGZRS = "Standard_RAGZRS"


class PrivateEndpointConnectionProvisioningState(  # pylint: disable=name-too-long
    str, Enum, metaclass=CaseInsensitiveEnumMeta
):
    """The current provisioning state."""

    SUCCEEDED = "Succeeded"
    CREATING = "Creating"
    DELETING = "Deleting"
    FAILED = "Failed"


class PrivateEndpointServiceConnectionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The private endpoint connection status."""

    PENDING = "Pending"
    APPROVED = "Approved"
    REJECTED = "Rejected"


class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Gets the status of the storage account at the time the operation was called.

    NOTE(review): this release removes the ValidateSubscriptionQuota*/Accepted/
    Deleting/Canceled/Failed members from this enum; they now live on
    ``StorageTaskAssignmentProvisioningState``. Verify no caller compared against
    the removed members of this enum.
    """

    CREATING = "Creating"
    RESOLVING_DNS = "ResolvingDNS"
    SUCCEEDED = "Succeeded"


class PublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Specifies whether data in the container may be accessed publicly and the level of access."""

    CONTAINER = "Container"
    BLOB = "Blob"
    NONE = "None"


class PublicNetworkAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Whether public network access is allowed for the resource."""

    ENABLED = "Enabled"
    DISABLED = "Disabled"
    SECURED_BY_PERIMETER = "SecuredByPerimeter"


class Reason(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The reason a storage account name is unavailable."""

    ACCOUNT_NAME_INVALID = "AccountNameInvalid"
    ALREADY_EXISTS = "AlreadyExists"


class ReasonCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The reason for the restriction."""

    QUOTA_ID = "QuotaId"
    NOT_AVAILABLE_FOR_SUBSCRIPTION = "NotAvailableForSubscription"


class ResourceAssociationAccessMode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Access Mode of the resource association."""

    ENFORCED = "Enforced"
    LEARNING = "Learning"
    AUDIT = "Audit"


class RootSquashType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The property is for NFS share only. The default is NoRootSquash."""

    NO_ROOT_SQUASH = "NoRootSquash"
    ROOT_SQUASH = "RootSquash"
    ALL_SQUASH = "AllSquash"


class RoutingChoice(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Routing Choice defines the kind of network routing opted by the user."""

    MICROSOFT_ROUTING = "MicrosoftRouting"
    INTERNET_ROUTING = "InternetRouting"


class RuleType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The valid value is Lifecycle."""

    LIFECYCLE = "Lifecycle"


class RunResult(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Represents the overall result of the execution for the run instance."""

    SUCCEEDED = "Succeeded"
    FAILED = "Failed"


class RunStatusEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Represents the status of the execution."""

    IN_PROGRESS = "InProgress"
    FINISHED = "Finished"


class Schedule(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """This is a required field. This field is used to schedule an inventory formation."""

    DAILY = "Daily"
    WEEKLY = "Weekly"
class Services(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The signed services accessible with the SAS: Blob (b), Queue (q), Table (t), File (f)."""

    B = "b"
    Q = "q"
    T = "t"
    F = "f"


class Severity(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Severity of the issue."""

    WARNING = "Warning"
    ERROR = "Error"


# NOTE(review): the opening line of this docstring is cut off by a hunk header in
# the source patch -- confirm wording against the spec.
class ShareAccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Access tier for a file share."""

    TRANSACTION_OPTIMIZED = "TransactionOptimized"
    HOT = "Hot"
    COOL = "Cool"
    PREMIUM = "Premium"


# NOTE(review): docstring opening line truncated in the source patch -- confirm.
class SignedResource(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The signed resource the SAS applies to."""

    B = "b"
    C = "c"
    F = "f"
    S = "s"


class SignedResourceTypes(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The signed resource types accessible with the account SAS: Service (s),
    Container (c), Object (o).
    """

    S = "s"
    C = "c"
    O = "o"


class SkuConversionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """This property indicates the current sku conversion status."""

    IN_PROGRESS = "InProgress"
    SUCCEEDED = "Succeeded"
    FAILED = "Failed"


# NOTE(review): docstring opening line truncated in the source patch -- confirm.
class SkuName(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The SKU name of the storage account."""

    STANDARD_LRS = "Standard_LRS"
    STANDARD_GRS = "Standard_GRS"
    STANDARD_RAGRS = "Standard_RAGRS"
    STANDARD_ZRS = "Standard_ZRS"
    PREMIUM_LRS = "Premium_LRS"
    PREMIUM_ZRS = "Premium_ZRS"
    STANDARD_GZRS = "Standard_GZRS"
    STANDARD_RAGZRS = "Standard_RAGZRS"
    STANDARD_V2_LRS = "StandardV2_LRS"
    STANDARD_V2_GRS = "StandardV2_GRS"
    STANDARD_V2_ZRS = "StandardV2_ZRS"
    STANDARD_V2_GZRS = "StandardV2_GZRS"
    PREMIUM_V2_LRS = "PremiumV2_LRS"
    PREMIUM_V2_ZRS = "PremiumV2_ZRS"


class SkuTier(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The SKU tier. This is based on the SKU name."""

    STANDARD = "Standard"
    PREMIUM = "Premium"


class State(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Gets the state of virtual network rule."""

    PROVISIONING = "Provisioning"
    DEPROVISIONING = "Deprovisioning"
    SUCCEEDED = "Succeeded"
    FAILED = "Failed"
    NETWORK_SOURCE_DELETED = "NetworkSourceDeleted"


class StorageAccountExpand(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Type of StorageAccountExpand."""

    GEO_REPLICATION_STATS = "geoReplicationStats"
    BLOB_RESTORE_STATUS = "blobRestoreStatus"


class StorageConnectorAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The auth type supported for bucket connection in storage connector."""

    MANAGED_IDENTITY = "ManagedIdentity"
    """Managed Identity auth type."""


class StorageConnectorConnectionType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The connection type for bucket connection in storage connector."""

    DATA_SHARE = "DataShare"
    """DataShare connection type."""


class StorageConnectorDataSourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The type of the backing data source for storage connector."""

    AZURE_DATA_SHARE = "Azure_DataShare"
    """Azure DataShare data source type."""


class StorageConnectorSourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The type of the backing data source for storage connector."""

    DATA_SHARE = "DataShare"
    """Source type - DataShare."""


class StorageConnectorState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The state of the storage connector."""

    ACTIVE = "Active"
    """The connector is active."""
    INACTIVE = "Inactive"
    """The connector is inactive."""


class StorageDataShareAccessPolicyPermission(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The permissions supported in access policies for storage data share."""

    NONE = "None"
    """No permission."""
    READ = "Read"
    """Read permission."""


# NOTE(review): the docstring below is copy-pasted from ProvisioningState and
# talks about "the storage account"; it should describe the storage *task
# assignment* provisioning state -- flag upstream in the spec.
class StorageTaskAssignmentProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Gets the status of the storage account at the time the operation was called."""

    VALIDATE_SUBSCRIPTION_QUOTA_BEGIN = "ValidateSubscriptionQuotaBegin"
    VALIDATE_SUBSCRIPTION_QUOTA_END = "ValidateSubscriptionQuotaEnd"
    ACCEPTED = "Accepted"
    CREATING = "Creating"
    SUCCEEDED = "Succeeded"
    DELETING = "Deleting"
    CANCELED = "Canceled"
    FAILED = "Failed"


class TriggerType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The trigger type of the storage task assignment execution."""

    RUN_ONCE = "RunOnce"
    ON_SCHEDULE = "OnSchedule"
    MOCK_RUN = "MockRun"
    """Run the task as a mock for testing."""


class UsageUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Gets the unit of measurement."""

    COUNT = "Count"
    BYTES = "Bytes"
    SECONDS = "Seconds"
    PERCENT = "Percent"
    COUNTS_PER_SECOND = "CountsPerSecond"
    BYTES_PER_SECOND = "BytesPerSecond"
"""COUNTS_PER_SECOND.""" BYTES_PER_SECOND = "BytesPerSecond" + """BYTES_PER_SECOND.""" class ZonePlacementPolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The availability zone pinning policy for the storage account.""" ANY = "Any" + """ANY.""" NONE = "None" + """NONE.""" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_models.py new file mode 100644 index 000000000000..a0fb1b8d1222 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_models.py @@ -0,0 +1,11828 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation + +import datetime +from typing import Any, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload + +from .._utils.model_base import Model as _Model, rest_discriminator, rest_field +from ._enums import StorageConnectorAuthType, StorageConnectorConnectionType, StorageConnectorSourceType + +if TYPE_CHECKING: + from .. import models as _models + + +class AccessPolicy(_Model): + """AccessPolicy. + + :ivar start_time: Start time of the access policy. + :vartype start_time: ~datetime.datetime + :ivar expiry_time: Expiry time of the access policy. + :vartype expiry_time: ~datetime.datetime + :ivar permission: List of abbreviated permissions. 
+ :vartype permission: str + """ + + start_time: Optional[datetime.datetime] = rest_field( + name="startTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """Start time of the access policy.""" + expiry_time: Optional[datetime.datetime] = rest_field( + name="expiryTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """Expiry time of the access policy.""" + permission: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of abbreviated permissions.""" + + @overload + def __init__( + self, + *, + start_time: Optional[datetime.datetime] = None, + expiry_time: Optional[datetime.datetime] = None, + permission: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AccountImmutabilityPolicyProperties(_Model): + """This defines account-level immutability policy properties. + + :ivar immutability_period_since_creation_in_days: The immutability period for the blobs in the + container since the policy creation, in days. + :vartype immutability_period_since_creation_in_days: int + :ivar state: The ImmutabilityPolicy state defines the mode of the policy. Disabled state + disables the policy, Unlocked state allows increase and decrease of immutability retention time + and also allows toggling allowProtectedAppendWrites property, Locked state only allows the + increase of the immutability retention time. A policy can only be created in a Disabled or + Unlocked state and can be toggled between the two states. Only a policy in an Unlocked state + can transition to a Locked state which cannot be reverted. Known values are: "Unlocked", + "Locked", and "Disabled". 
+ :vartype state: str or ~azure.mgmt.storage.models.AccountImmutabilityPolicyState + :ivar allow_protected_append_writes: This property can only be changed for disabled and + unlocked time-based retention policies. When enabled, new blocks can be written to an append + blob while maintaining immutability protection and compliance. Only new blocks can be added and + any existing blocks cannot be modified or deleted. + :vartype allow_protected_append_writes: bool + """ + + immutability_period_since_creation_in_days: Optional[int] = rest_field( + name="immutabilityPeriodSinceCreationInDays", visibility=["read", "create", "update", "delete", "query"] + ) + """The immutability period for the blobs in the container since the policy creation, in days.""" + state: Optional[Union[str, "_models.AccountImmutabilityPolicyState"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The ImmutabilityPolicy state defines the mode of the policy. Disabled state disables the + policy, Unlocked state allows increase and decrease of immutability retention time and also + allows toggling allowProtectedAppendWrites property, Locked state only allows the increase of + the immutability retention time. A policy can only be created in a Disabled or Unlocked state + and can be toggled between the two states. Only a policy in an Unlocked state can transition to + a Locked state which cannot be reverted. Known values are: \"Unlocked\", \"Locked\", and + \"Disabled\".""" + allow_protected_append_writes: Optional[bool] = rest_field( + name="allowProtectedAppendWrites", visibility=["read", "create", "update", "delete", "query"] + ) + """This property can only be changed for disabled and unlocked time-based retention policies. When + enabled, new blocks can be written to an append blob while maintaining immutability protection + and compliance. 
Only new blocks can be added and any existing blocks cannot be modified or + deleted.""" + + @overload + def __init__( + self, + *, + immutability_period_since_creation_in_days: Optional[int] = None, + state: Optional[Union[str, "_models.AccountImmutabilityPolicyState"]] = None, + allow_protected_append_writes: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AccountLimits(_Model): + """Maximum provisioned storage, IOPS, bandwidth and number of file shares limits for the storage + account. + + :ivar max_file_shares: The maximum number of file shares limit for the storage account. + :vartype max_file_shares: int + :ivar max_provisioned_storage_gi_b: The maximum provisioned storage quota limit in gibibytes + for the storage account. + :vartype max_provisioned_storage_gi_b: int + :ivar max_provisioned_iops: The maximum provisioned IOPS limit for the storage account. + :vartype max_provisioned_iops: int + :ivar max_provisioned_bandwidth_mi_b_per_sec: The maximum provisioned bandwidth limit in + mebibytes per second for the storage account. 
class AccountSasParameters(_Model):
    """The parameters to list SAS credentials of a storage account.

    :ivar services: The signed services accessible with the account SAS: Blob (b), Queue (q),
     Table (t), File (f). Required. Known values are: "b", "q", "t", and "f".
    :vartype services: str or ~azure.mgmt.storage.models.Services
    :ivar resource_types: The signed resource types accessible with the account SAS: Service (s),
     Container (c), Object (o). Required. Known values are: "s", "c", and "o".
    :vartype resource_types: str or ~azure.mgmt.storage.models.SignedResourceTypes
    :ivar permissions: The signed permissions: Read (r), Write (w), Delete (d), List (l),
     Add (a), Create (c), Update (u), Process (p). Required.
    :vartype permissions: str or ~azure.mgmt.storage.models.Permissions
    :ivar ip_address_or_range: An IP address or a range of IP addresses from which to accept
     requests.
    :vartype ip_address_or_range: str
    :ivar protocols: The protocol permitted for a request made with the account SAS. Known
     values are: "https,http" and "https".
    :vartype protocols: str or ~azure.mgmt.storage.models.HttpProtocol
    :ivar shared_access_start_time: The time at which the SAS becomes valid.
    :vartype shared_access_start_time: ~datetime.datetime
    :ivar shared_access_expiry_time: The time at which the shared access signature becomes
     invalid. Required.
    :vartype shared_access_expiry_time: ~datetime.datetime
    :ivar key_to_sign: The key to sign the account SAS token with.
    :vartype key_to_sign: str
    """

    services: Union[str, "_models.Services"] = rest_field(
        name="signedServices", visibility=["read", "create", "update", "delete", "query"]
    )
    """The signed services accessible with the account SAS. Required."""
    resource_types: Union[str, "_models.SignedResourceTypes"] = rest_field(
        name="signedResourceTypes", visibility=["read", "create", "update", "delete", "query"]
    )
    """The signed resource types that are accessible with the account SAS. Required."""
    permissions: Union[str, "_models.Permissions"] = rest_field(
        name="signedPermission", visibility=["read", "create", "update", "delete", "query"]
    )
    """The signed permissions for the account SAS. Required."""
    ip_address_or_range: Optional[str] = rest_field(
        name="signedIp", visibility=["read", "create", "update", "delete", "query"]
    )
    """An IP address or a range of IP addresses from which to accept requests."""
    protocols: Optional[Union[str, "_models.HttpProtocol"]] = rest_field(
        name="signedProtocol", visibility=["read", "create", "update", "delete", "query"]
    )
    """The protocol permitted for a request made with the account SAS."""
    shared_access_start_time: Optional[datetime.datetime] = rest_field(
        name="signedStart", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The time at which the SAS becomes valid."""
    shared_access_expiry_time: datetime.datetime = rest_field(
        name="signedExpiry", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The time at which the shared access signature becomes invalid. Required."""
    key_to_sign: Optional[str] = rest_field(
        name="keyToSign", visibility=["read", "create", "update", "delete", "query"]
    )
    """The key to sign the account SAS token with."""

    @overload
    def __init__(
        self,
        *,
        services: Union[str, "_models.Services"],
        resource_types: Union[str, "_models.SignedResourceTypes"],
        permissions: Union[str, "_models.Permissions"],
        shared_access_expiry_time: datetime.datetime,
        ip_address_or_range: Optional[str] = None,
        protocols: Optional[Union[str, "_models.HttpProtocol"]] = None,
        shared_access_start_time: Optional[datetime.datetime] = None,
        key_to_sign: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class AccountUsage(_Model):
    """Usage of provisioned storage, IOPS, bandwidth and number of file shares across all live
    shares and soft-deleted shares in the account.

    :ivar live_shares: Aggregate usage across all live shares in the account.
    :vartype live_shares: ~azure.mgmt.storage.models.AccountUsageElements
    :ivar soft_deleted_shares: Aggregate usage across all soft-deleted shares in the account.
    :vartype soft_deleted_shares: ~azure.mgmt.storage.models.AccountUsageElements
    """

    live_shares: Optional["_models.AccountUsageElements"] = rest_field(
        name="liveShares", visibility=["read", "create", "update", "delete", "query"]
    )
    """Aggregate usage across all live shares in the account."""
    soft_deleted_shares: Optional["_models.AccountUsageElements"] = rest_field(
        name="softDeletedShares", visibility=["read", "create", "update", "delete", "query"]
    )
    """Aggregate usage across all soft-deleted shares in the account."""

    @overload
    def __init__(
        self,
        *,
        live_shares: Optional["_models.AccountUsageElements"] = None,
        soft_deleted_shares: Optional["_models.AccountUsageElements"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AccountUsageElements(_Model): + """Usage of provisioned storage, IOPS, bandwidth and number of file shares across all live shares + or soft-deleted shares in the account. + + :ivar file_share_count: The total number of file shares. + :vartype file_share_count: int + :ivar provisioned_storage_gi_b: The total provisioned storage quota in gibibytes. + :vartype provisioned_storage_gi_b: int + :ivar provisioned_iops: The total provisioned IOPS. + :vartype provisioned_iops: int + :ivar provisioned_bandwidth_mi_b_per_sec: The total provisioned bandwidth in mebibytes per + second. + :vartype provisioned_bandwidth_mi_b_per_sec: int + """ + + file_share_count: Optional[int] = rest_field(name="fileShareCount", visibility=["read"]) + """The total number of file shares.""" + provisioned_storage_gi_b: Optional[int] = rest_field(name="provisionedStorageGiB", visibility=["read"]) + """The total provisioned storage quota in gibibytes.""" + provisioned_iops: Optional[int] = rest_field(name="provisionedIOPS", visibility=["read"]) + """The total provisioned IOPS.""" + provisioned_bandwidth_mi_b_per_sec: Optional[int] = rest_field( + name="provisionedBandwidthMiBPerSec", visibility=["read"] + ) + """The total provisioned bandwidth in mebibytes per second.""" + + +class ActiveDirectoryProperties(_Model): + """Settings properties for Active Directory (AD). + + :ivar domain_name: Specifies the primary domain that the AD DNS server is authoritative for. + This property is required if directoryServiceOptions is set to AD (AD DS authentication). If + directoryServiceOptions is set to AADDS (Entra DS authentication), providing this property is + optional, as it will be inferred automatically if omitted. 
If directoryServiceOptions is set to + AADKERB (Entra authentication), this property is optional; it is needed to support + configuration of directory- and file-level permissions via Windows File Explorer, but is not + required for authentication. + :vartype domain_name: str + :ivar net_bios_domain_name: Specifies the NetBIOS domain name. If directoryServiceOptions is + set to AD (AD DS authentication), this property is required. Otherwise, it can be omitted. + :vartype net_bios_domain_name: str + :ivar forest_name: Specifies the Active Directory forest to get. If directoryServiceOptions is + set to AD (AD DS authentication), this property is required. Otherwise, it can be omitted. + :vartype forest_name: str + :ivar domain_guid: Specifies the domain GUID. If directoryServiceOptions is set to AD (AD DS + authentication), this property is required. If directoryServiceOptions is set to AADDS (Entra + DS authentication), this property can be omitted. If directoryServiceOptions is set to AADKERB + (Entra authentication), this property is optional; it is needed to support configuration of + directory- and file-level permissions via Windows File Explorer, but is not required for + authentication. + :vartype domain_guid: str + :ivar domain_sid: Specifies the security identifier (SID) of the AD domain. If + directoryServiceOptions is set to AD (AD DS authentication), this property is required. + Otherwise, it can be omitted. + :vartype domain_sid: str + :ivar azure_storage_sid: Specifies the security identifier (SID) for Azure Storage. If + directoryServiceOptions is set to AD (AD DS authentication), this property is required. + Otherwise, it can be omitted. + :vartype azure_storage_sid: str + :ivar sam_account_name: Specifies the Active Directory SAMAccountName for Azure Storage. If + directoryServiceOptions is set to AD (AD DS authentication), this property is optional. If + provided, accountType should also be provided. 
For directoryServiceOptions AADDS (Entra DS + authentication) or AADKERB (Entra authentication), this property can be omitted. + :vartype sam_account_name: str + :ivar account_type: Specifies the Active Directory account type for Azure Storage. If + directoryServiceOptions is set to AD (AD DS authentication), this property is optional. If + provided, samAccountName should also be provided. For directoryServiceOptions AADDS (Entra DS + authentication) or AADKERB (Entra authentication), this property can be omitted. Known values + are: "User" and "Computer". + :vartype account_type: str or ~azure.mgmt.storage.models.AccountType + """ + + domain_name: Optional[str] = rest_field( + name="domainName", visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the primary domain that the AD DNS server is authoritative for. This property is + required if directoryServiceOptions is set to AD (AD DS authentication). If + directoryServiceOptions is set to AADDS (Entra DS authentication), providing this property is + optional, as it will be inferred automatically if omitted. If directoryServiceOptions is set to + AADKERB (Entra authentication), this property is optional; it is needed to support + configuration of directory- and file-level permissions via Windows File Explorer, but is not + required for authentication.""" + net_bios_domain_name: Optional[str] = rest_field( + name="netBiosDomainName", visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the NetBIOS domain name. If directoryServiceOptions is set to AD (AD DS + authentication), this property is required. Otherwise, it can be omitted.""" + forest_name: Optional[str] = rest_field( + name="forestName", visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the Active Directory forest to get. If directoryServiceOptions is set to AD (AD DS + authentication), this property is required. 
Otherwise, it can be omitted.""" + domain_guid: Optional[str] = rest_field( + name="domainGuid", visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the domain GUID. If directoryServiceOptions is set to AD (AD DS authentication), this + property is required. If directoryServiceOptions is set to AADDS (Entra DS authentication), + this property can be omitted. If directoryServiceOptions is set to AADKERB (Entra + authentication), this property is optional; it is needed to support configuration of directory- + and file-level permissions via Windows File Explorer, but is not required for authentication.""" + domain_sid: Optional[str] = rest_field(name="domainSid", visibility=["read", "create", "update", "delete", "query"]) + """Specifies the security identifier (SID) of the AD domain. If directoryServiceOptions is set to + AD (AD DS authentication), this property is required. Otherwise, it can be omitted.""" + azure_storage_sid: Optional[str] = rest_field( + name="azureStorageSid", visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the security identifier (SID) for Azure Storage. If directoryServiceOptions is set to + AD (AD DS authentication), this property is required. Otherwise, it can be omitted.""" + sam_account_name: Optional[str] = rest_field( + name="samAccountName", visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the Active Directory SAMAccountName for Azure Storage. If directoryServiceOptions is + set to AD (AD DS authentication), this property is optional. If provided, accountType should + also be provided. For directoryServiceOptions AADDS (Entra DS authentication) or AADKERB (Entra + authentication), this property can be omitted.""" + account_type: Optional[Union[str, "_models.AccountType"]] = rest_field( + name="accountType", visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the Active Directory account type for Azure Storage. 
If directoryServiceOptions is
    set to AD (AD DS authentication), this property is optional. If provided, samAccountName should
    also be provided. For directoryServiceOptions AADDS (Entra DS authentication) or AADKERB (Entra
    authentication), this property can be omitted. Known values are: \"User\" and \"Computer\"."""

    @overload
    def __init__(
        self,
        *,
        domain_name: Optional[str] = None,
        net_bios_domain_name: Optional[str] = None,
        forest_name: Optional[str] = None,
        domain_guid: Optional[str] = None,
        domain_sid: Optional[str] = None,
        azure_storage_sid: Optional[str] = None,
        sam_account_name: Optional[str] = None,
        account_type: Optional[Union[str, "_models.AccountType"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # All argument handling (keyword args or a raw JSON mapping) is done by the base model.
        super().__init__(*args, **kwargs)


class ResourceAutoGenerated(_Model):
    """Resource.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.storage.models.SystemData
    """

    # All four ARM envelope properties are server-populated: visibility is read-only,
    # so no keyword-argument __init__ overload is generated for this class.
    id: Optional[str] = rest_field(visibility=["read"])
    """Fully qualified resource ID for the resource. Ex -
    /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}."""
    name: Optional[str] = rest_field(visibility=["read"])
    """The name of the resource."""
    type: Optional[str] = rest_field(visibility=["read"])
    """The type of the resource. E.g. \"Microsoft.Compute/virtualMachines\" or
    \"Microsoft.Storage/storageAccounts\"."""
    system_data: Optional["_models.SystemData"] = rest_field(name="systemData", visibility=["read"])
    """Azure Resource Manager metadata containing createdBy and modifiedBy information."""


class AzureEntityResource(ResourceAutoGenerated):
    """Entity Resource.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.storage.models.SystemData
    :ivar etag: Resource Etag.
    :vartype etag: str
    """

    etag: Optional[str] = rest_field(visibility=["read"])
    """Resource Etag."""


class AzureFilesIdentityBasedAuthentication(_Model):
    """Settings for Azure Files identity based authentication.

    :ivar directory_service_options: Indicates the directory service used. Note that this enum may
     be extended in the future. Required. Known values are: "None", "AADDS", "AD", and "AADKERB".
    :vartype directory_service_options: str or ~azure.mgmt.storage.models.DirectoryServiceOptions
    :ivar active_directory_properties: Additional information about the directory service. Required
     if directoryServiceOptions is AD (AD DS authentication).
Optional for directoryServiceOptions
     AADDS (Entra DS authentication) and AADKERB (Entra authentication).
    :vartype active_directory_properties: ~azure.mgmt.storage.models.ActiveDirectoryProperties
    :ivar default_share_permission: Default share permission for users using Kerberos
     authentication if RBAC role is not assigned. Known values are: "None",
     "StorageFileDataSmbShareReader", "StorageFileDataSmbShareContributor", and
     "StorageFileDataSmbShareElevatedContributor".
    :vartype default_share_permission: str or ~azure.mgmt.storage.models.DefaultSharePermission
    :ivar smb_o_auth_settings: Required for Managed Identities access using OAuth over SMB.
    :vartype smb_o_auth_settings: ~azure.mgmt.storage.models.SmbOAuthSettings
    """

    # The only required property: declared non-Optional and without a default in the
    # keyword __init__ overload below.
    directory_service_options: Union[str, "_models.DirectoryServiceOptions"] = rest_field(
        name="directoryServiceOptions", visibility=["read", "create", "update", "delete", "query"]
    )
    """Indicates the directory service used. Note that this enum may be extended in the future.
    Required. Known values are: \"None\", \"AADDS\", \"AD\", and \"AADKERB\"."""
    active_directory_properties: Optional["_models.ActiveDirectoryProperties"] = rest_field(
        name="activeDirectoryProperties", visibility=["read", "create", "update", "delete", "query"]
    )
    """Additional information about the directory service. Required if directoryServiceOptions is AD
    (AD DS authentication). Optional for directoryServiceOptions AADDS (Entra DS authentication)
    and AADKERB (Entra authentication)."""
    default_share_permission: Optional[Union[str, "_models.DefaultSharePermission"]] = rest_field(
        name="defaultSharePermission", visibility=["read", "create", "update", "delete", "query"]
    )
    """Default share permission for users using Kerberos authentication if RBAC role is not assigned.
    Known values are: \"None\", \"StorageFileDataSmbShareReader\",
    \"StorageFileDataSmbShareContributor\", and \"StorageFileDataSmbShareElevatedContributor\"."""
    smb_o_auth_settings: Optional["_models.SmbOAuthSettings"] = rest_field(
        name="smbOAuthSettings", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required for Managed Identities access using OAuth over SMB."""

    @overload
    def __init__(
        self,
        *,
        directory_service_options: Union[str, "_models.DirectoryServiceOptions"],
        active_directory_properties: Optional["_models.ActiveDirectoryProperties"] = None,
        default_share_permission: Optional[Union[str, "_models.DefaultSharePermission"]] = None,
        smb_o_auth_settings: Optional["_models.SmbOAuthSettings"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ProxyResource(ResourceAutoGenerated):
    """Proxy Resource.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.storage.models.SystemData
    """

    # Marker subclass: adds no properties beyond the ResourceAutoGenerated envelope.


class BlobContainer(ProxyResource):
    """Properties of the blob container, including Id, resource name, resource type, Etag.

    :ivar id: Fully qualified resource ID for the resource.
Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.storage.models.SystemData
    :ivar container_properties: Properties of the blob container.
    :vartype container_properties: ~azure.mgmt.storage.models.ContainerProperties
    :ivar etag: Resource Etag.
    :vartype etag: str
    """

    container_properties: Optional["_models.ContainerProperties"] = rest_field(
        name="properties", visibility=["read", "create", "update", "delete", "query"]
    )
    """Properties of the blob container."""
    etag: Optional[str] = rest_field(visibility=["read"])
    """Resource Etag."""

    # Names of ContainerProperties attributes that are surfaced ("flattened") directly on
    # this model through the __getattr__/__setattr__ overrides below, for backward
    # compatibility with the previously flattened model shape.
    __flattened_items = [
        "version",
        "deleted",
        "deleted_time",
        "remaining_retention_days",
        "default_encryption_scope",
        "deny_encryption_scope_override",
        "public_access",
        "last_modified_time",
        "lease_status",
        "lease_state",
        "lease_duration",
        "metadata",
        "immutability_policy",
        "legal_hold",
        "has_legal_hold",
        "has_immutability_policy",
        "immutable_storage_with_versioning",
        "enable_nfs_v3_root_squash",
        "enable_nfs_v3_all_squash",
    ]

    @overload
    def __init__(
        self,
        *,
        container_properties: Optional["_models.ContainerProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Pull the flattened keyword arguments out before base initialization, then
        # re-apply them via __setattr__ so they land on container_properties.
        _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}
        super().__init__(*args, **kwargs)
        for k, v in _flattened_input.items():
            setattr(self, k, v)

    def __getattr__(self, name: str) -> Any:
        # Reads of flattened attributes are delegated to container_properties
        # (returning None when the nested model is absent).
        if name in self.__flattened_items:
            if self.container_properties is None:
                return None
            return getattr(self.container_properties, name)
        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")

    def __setattr__(self, key: str, value: Any) -> None:
        # Writes to flattened attributes are forwarded to container_properties,
        # creating the nested model on demand.
        if key in self.__flattened_items:
            if self.container_properties is None:
                self.container_properties = self._attr_to_rest_field["container_properties"]._class_type()
            setattr(self.container_properties, key, value)
        else:
            super().__setattr__(key, value)


class BlobInventoryCreationTime(_Model):
    """This property defines the creation time based filtering condition. Blob Inventory schema
    parameter 'Creation-Time' is mandatory with this filter.

    :ivar last_n_days: When set the policy filters the objects that are created in the last N days.
     Where N is an integer value between 1 to 36500.
    :vartype last_n_days: int
    """

    last_n_days: Optional[int] = rest_field(
        name="lastNDays", visibility=["read", "create", "update", "delete", "query"]
    )
    """When set the policy filters the objects that are created in the last N days. Where N is an
    integer value between 1 to 36500."""

    @overload
    def __init__(
        self,
        *,
        last_n_days: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class BlobInventoryPolicy(ProxyResource):
    """The storage account blob inventory policy.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.storage.models.SystemData
    :ivar properties: Returns the storage account blob inventory policy rules.
    :vartype properties: ~azure.mgmt.storage.models.BlobInventoryPolicyProperties
    """

    properties: Optional["_models.BlobInventoryPolicyProperties"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Returns the storage account blob inventory policy rules."""

    # BlobInventoryPolicyProperties attributes surfaced ("flattened") directly on this
    # model through the __getattr__/__setattr__ overrides below.
    __flattened_items = ["last_modified_time", "policy"]

    @overload
    def __init__(
        self,
        *,
        properties: Optional["_models.BlobInventoryPolicyProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Flattened keyword arguments are removed before base initialization and
        # re-applied via __setattr__ so they land on the nested properties model.
        _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}
        super().__init__(*args, **kwargs)
        for k, v in _flattened_input.items():
            setattr(self, k, v)

    def __getattr__(self, name: str) -> Any:
        # Delegate flattened reads to the nested properties model (None-safe).
        if name in self.__flattened_items:
            if self.properties is None:
                return None
            return getattr(self.properties, name)
        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")

    def __setattr__(self, key: str, value: Any) -> None:
        # Delegate flattened writes, creating the nested properties model on demand.
        if key in self.__flattened_items:
            if self.properties is None:
                self.properties = self._attr_to_rest_field["properties"]._class_type()
            setattr(self.properties, key, value)
        else:
            super().__setattr__(key, value)


class BlobInventoryPolicyDefinition(_Model):
    """An object that defines the blob inventory rule.

    :ivar filters: An object that defines the filter set.
    :vartype filters: ~azure.mgmt.storage.models.BlobInventoryPolicyFilter
    :ivar format: This is a required field, it specifies the format for the inventory files.
     Required. Known values are: "Csv" and "Parquet".
    :vartype format: str or ~azure.mgmt.storage.models.Format
    :ivar schedule: This is a required field. This field is used to schedule an inventory
     formation. Required. Known values are: "Daily" and "Weekly".
    :vartype schedule: str or ~azure.mgmt.storage.models.Schedule
    :ivar object_type: This is a required field. This field specifies the scope of the inventory
     created either at the blob or container level. Required. Known values are: "Blob" and
     "Container".
    :vartype object_type: str or ~azure.mgmt.storage.models.ObjectType
    :ivar schema_fields: This is a required field. This field specifies the fields and properties
     of the object to be included in the inventory. The Schema field value 'Name' is always
     required.
The valid values for this field for the 'Blob' definition.objectType include 'Name,
     Creation-Time, Last-Modified, Content-Length, Content-MD5, BlobType, AccessTier,
     AccessTierChangeTime, AccessTierInferred, Tags, Expiry-Time, hdi_isfolder, Owner, Group,
     Permissions, Acl, Snapshot, VersionId, IsCurrentVersion, Metadata, LastAccessTime, Tags, Etag,
     ContentType, ContentEncoding, ContentLanguage, ContentCRC64, CacheControl, ContentDisposition,
     LeaseStatus, LeaseState, LeaseDuration, ServerEncrypted, Deleted, DeletionId, DeletedTime,
     RemainingRetentionDays, ImmutabilityPolicyUntilDate, ImmutabilityPolicyMode, LegalHold, CopyId,
     CopyStatus, CopySource, CopyProgress, CopyCompletionTime, CopyStatusDescription,
     CustomerProvidedKeySha256, RehydratePriority, ArchiveStatus, XmsBlobSequenceNumber,
     EncryptionScope, IncrementalCopy, TagCount'. For Blob object type schema field value
     'DeletedTime' is applicable only for Hns enabled accounts. The valid values for 'Container'
     definition.objectType include 'Name, Last-Modified, Metadata, LeaseStatus, LeaseState,
     LeaseDuration, PublicAccess, HasImmutabilityPolicy, HasLegalHold, Etag, DefaultEncryptionScope,
     DenyEncryptionScopeOverride, ImmutableStorageWithVersioningEnabled, Deleted, Version,
     DeletedTime, RemainingRetentionDays'. Schema field values 'Expiry-Time, hdi_isfolder, Owner,
     Group, Permissions, Acl, DeletionId' are valid only for Hns enabled accounts.Schema field
     values 'Tags, TagCount' are only valid for Non-Hns accounts. Required.
    :vartype schema_fields: list[str]
    """

    filters: Optional["_models.BlobInventoryPolicyFilter"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """An object that defines the filter set."""
    # NOTE: the attribute name intentionally shadows the built-in 'format' so that the
    # Python name matches the REST property name.
    format: Union[str, "_models.Format"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """This is a required field, it specifies the format for the inventory files. Required. Known
    values are: \"Csv\" and \"Parquet\"."""
    schedule: Union[str, "_models.Schedule"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """This is a required field. This field is used to schedule an inventory formation. Required.
    Known values are: \"Daily\" and \"Weekly\"."""
    object_type: Union[str, "_models.ObjectType"] = rest_field(
        name="objectType", visibility=["read", "create", "update", "delete", "query"]
    )
    """This is a required field. This field specifies the scope of the inventory created either at the
    blob or container level. Required. Known values are: \"Blob\" and \"Container\"."""
    schema_fields: list[str] = rest_field(
        name="schemaFields", visibility=["read", "create", "update", "delete", "query"]
    )
    """This is a required field. This field specifies the fields and properties of the object to be
    included in the inventory. The Schema field value 'Name' is always required. The valid values
    for this field for the 'Blob' definition.objectType include 'Name, Creation-Time,
    Last-Modified, Content-Length, Content-MD5, BlobType, AccessTier, AccessTierChangeTime,
    AccessTierInferred, Tags, Expiry-Time, hdi_isfolder, Owner, Group, Permissions, Acl, Snapshot,
    VersionId, IsCurrentVersion, Metadata, LastAccessTime, Tags, Etag, ContentType,
    ContentEncoding, ContentLanguage, ContentCRC64, CacheControl, ContentDisposition, LeaseStatus,
    LeaseState, LeaseDuration, ServerEncrypted, Deleted, DeletionId, DeletedTime,
    RemainingRetentionDays, ImmutabilityPolicyUntilDate, ImmutabilityPolicyMode, LegalHold, CopyId,
    CopyStatus, CopySource, CopyProgress, CopyCompletionTime, CopyStatusDescription,
    CustomerProvidedKeySha256, RehydratePriority, ArchiveStatus, XmsBlobSequenceNumber,
    EncryptionScope, IncrementalCopy, TagCount'. For Blob object type schema field value
    'DeletedTime' is applicable only for Hns enabled accounts. The valid values for 'Container'
    definition.objectType include 'Name, Last-Modified, Metadata, LeaseStatus, LeaseState,
    LeaseDuration, PublicAccess, HasImmutabilityPolicy, HasLegalHold, Etag, DefaultEncryptionScope,
    DenyEncryptionScopeOverride, ImmutableStorageWithVersioningEnabled, Deleted, Version,
    DeletedTime, RemainingRetentionDays'. Schema field values 'Expiry-Time, hdi_isfolder, Owner,
    Group, Permissions, Acl, DeletionId' are valid only for Hns enabled accounts.Schema field
    values 'Tags, TagCount' are only valid for Non-Hns accounts. Required."""

    @overload
    def __init__(
        self,
        *,
        format: Union[str, "_models.Format"],
        schedule: Union[str, "_models.Schedule"],
        object_type: Union[str, "_models.ObjectType"],
        schema_fields: list[str],
        filters: Optional["_models.BlobInventoryPolicyFilter"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class BlobInventoryPolicyFilter(_Model):
    """An object that defines the blob inventory rule filter conditions. For 'Blob'
    definition.objectType all filter properties are applicable, 'blobTypes' is required and others
    are optional. For 'Container' definition.objectType only prefixMatch is applicable and is
    optional.

    :ivar prefix_match: An array of strings with maximum 10 blob prefixes to be included in the
     inventory.
    :vartype prefix_match: list[str]
    :ivar exclude_prefix: An array of strings with maximum 10 blob prefixes to be excluded from the
     inventory.
    :vartype exclude_prefix: list[str]
    :ivar blob_types: An array of predefined enum values. Valid values include blockBlob,
     appendBlob, pageBlob. Hns accounts does not support pageBlobs. This field is required when
     definition.objectType property is set to 'Blob'.
:vartype blob_types: list[str]
    :ivar include_blob_versions: Includes blob versions in blob inventory when value is set to
     true. The definition.schemaFields values 'VersionId and IsCurrentVersion' are required if this
     property is set to true, else they must be excluded.
    :vartype include_blob_versions: bool
    :ivar include_snapshots: Includes blob snapshots in blob inventory when value is set to true.
     The definition.schemaFields value 'Snapshot' is required if this property is set to true, else
     it must be excluded.
    :vartype include_snapshots: bool
    :ivar include_deleted: For 'Container' definition.objectType the definition.schemaFields must
     include 'Deleted, Version, DeletedTime and RemainingRetentionDays'. For 'Blob'
     definition.objectType and HNS enabled storage accounts the definition.schemaFields must include
     'DeletionId, Deleted, DeletedTime and RemainingRetentionDays' and for Hns disabled accounts the
     definition.schemaFields must include 'Deleted and RemainingRetentionDays', else it must be
     excluded.
    :vartype include_deleted: bool
    :ivar creation_time: This property is used to filter objects based on the object creation time.
    :vartype creation_time: ~azure.mgmt.storage.models.BlobInventoryCreationTime
    """

    # All properties are optional on the wire; cross-field requirements (e.g. blobTypes
    # being mandatory for 'Blob' object type) are enforced by the service, not here.
    prefix_match: Optional[list[str]] = rest_field(
        name="prefixMatch", visibility=["read", "create", "update", "delete", "query"]
    )
    """An array of strings with maximum 10 blob prefixes to be included in the inventory."""
    exclude_prefix: Optional[list[str]] = rest_field(
        name="excludePrefix", visibility=["read", "create", "update", "delete", "query"]
    )
    """An array of strings with maximum 10 blob prefixes to be excluded from the inventory."""
    blob_types: Optional[list[str]] = rest_field(
        name="blobTypes", visibility=["read", "create", "update", "delete", "query"]
    )
    """An array of predefined enum values. Valid values include blockBlob, appendBlob, pageBlob. Hns
    accounts does not support pageBlobs. This field is required when definition.objectType property
    is set to 'Blob'."""
    include_blob_versions: Optional[bool] = rest_field(
        name="includeBlobVersions", visibility=["read", "create", "update", "delete", "query"]
    )
    """Includes blob versions in blob inventory when value is set to true. The definition.schemaFields
    values 'VersionId and IsCurrentVersion' are required if this property is set to true, else they
    must be excluded."""
    include_snapshots: Optional[bool] = rest_field(
        name="includeSnapshots", visibility=["read", "create", "update", "delete", "query"]
    )
    """Includes blob snapshots in blob inventory when value is set to true. The
    definition.schemaFields value 'Snapshot' is required if this property is set to true, else it
    must be excluded."""
    include_deleted: Optional[bool] = rest_field(
        name="includeDeleted", visibility=["read", "create", "update", "delete", "query"]
    )
    """For 'Container' definition.objectType the definition.schemaFields must include 'Deleted,
    Version, DeletedTime and RemainingRetentionDays'. For 'Blob' definition.objectType and HNS
    enabled storage accounts the definition.schemaFields must include 'DeletionId, Deleted,
    DeletedTime and RemainingRetentionDays' and for Hns disabled accounts the
    definition.schemaFields must include 'Deleted and RemainingRetentionDays', else it must be
    excluded."""
    creation_time: Optional["_models.BlobInventoryCreationTime"] = rest_field(
        name="creationTime", visibility=["read", "create", "update", "delete", "query"]
    )
    """This property is used to filter objects based on the object creation time."""

    @overload
    def __init__(
        self,
        *,
        prefix_match: Optional[list[str]] = None,
        exclude_prefix: Optional[list[str]] = None,
        blob_types: Optional[list[str]] = None,
        include_blob_versions: Optional[bool] = None,
        include_snapshots: Optional[bool] = None,
        include_deleted: Optional[bool] = None,
        creation_time: Optional["_models.BlobInventoryCreationTime"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class BlobInventoryPolicyProperties(_Model):
    """The storage account blob inventory policy properties.

    :ivar last_modified_time: Returns the last modified date and time of the blob inventory policy.
    :vartype last_modified_time: ~datetime.datetime
    :ivar policy: The storage account blob inventory policy object. It is composed of policy rules.
     Required.
:vartype policy: ~azure.mgmt.storage.models.BlobInventoryPolicySchema
    """

    # Server-managed timestamp: read-only on the wire, (de)serialized as RFC 3339.
    last_modified_time: Optional[datetime.datetime] = rest_field(
        name="lastModifiedTime", visibility=["read"], format="rfc3339"
    )
    """Returns the last modified date and time of the blob inventory policy."""
    policy: "_models.BlobInventoryPolicySchema" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The storage account blob inventory policy object. It is composed of policy rules. Required."""

    @overload
    def __init__(
        self,
        *,
        policy: "_models.BlobInventoryPolicySchema",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class BlobInventoryPolicyRule(_Model):
    """An object that wraps the blob inventory rule. Each rule is uniquely defined by name.

    :ivar enabled: Rule is enabled when set to true. Required.
    :vartype enabled: bool
    :ivar name: A rule name can contain any combination of alpha numeric characters. Rule name is
     case-sensitive. It must be unique within a policy. Required.
    :vartype name: str
    :ivar destination: Container name where blob inventory files are stored. Must be pre-created.
     Required.
    :vartype destination: str
    :ivar definition: An object that defines the blob inventory policy rule. Required.
    :vartype definition: ~azure.mgmt.storage.models.BlobInventoryPolicyDefinition
    """

    # All four properties are required: non-Optional and without defaults in the overload.
    enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Rule is enabled when set to true. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A rule name can contain any combination of alpha numeric characters. Rule name is
    case-sensitive. It must be unique within a policy. Required."""
    destination: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Container name where blob inventory files are stored. Must be pre-created. Required."""
    definition: "_models.BlobInventoryPolicyDefinition" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """An object that defines the blob inventory policy rule. Required."""

    @overload
    def __init__(
        self,
        *,
        enabled: bool,
        name: str,
        destination: str,
        definition: "_models.BlobInventoryPolicyDefinition",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class BlobInventoryPolicySchema(_Model):
    """The storage account blob inventory policy rules.

    :ivar enabled: Policy is enabled if set to true. Required.
    :vartype enabled: bool
    :ivar destination: Deprecated Property from API version 2021-04-01 onwards, the required
     destination container name must be specified at the rule level 'policy.rule.destination'.
    :vartype destination: str
    :ivar type: The valid value is Inventory. Required. "Inventory"
    :vartype type: str or ~azure.mgmt.storage.models.InventoryRuleType
    :ivar rules: The storage account blob inventory policy rules. The rule is applied when it is
     enabled. Required.
    :vartype rules: list[~azure.mgmt.storage.models.BlobInventoryPolicyRule]
    """

    enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Policy is enabled if set to true.
Required."""
    # Deprecated on the wire: read-only, kept for responses from older API versions.
    destination: Optional[str] = rest_field(visibility=["read"])
    """Deprecated Property from API version 2021-04-01 onwards, the required destination container
    name must be specified at the rule level 'policy.rule.destination'."""
    # NOTE: the attribute name intentionally shadows the built-in 'type' so that the
    # Python name matches the REST property name.
    type: Union[str, "_models.InventoryRuleType"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The valid value is Inventory. Required. \"Inventory\""""
    rules: list["_models.BlobInventoryPolicyRule"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The storage account blob inventory policy rules. The rule is applied when it is enabled.
    Required."""

    @overload
    def __init__(
        self,
        *,
        enabled: bool,
        type: Union[str, "_models.InventoryRuleType"],
        rules: list["_models.BlobInventoryPolicyRule"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class BlobRestoreParameters(_Model):
    """Blob restore parameters.

    :ivar time_to_restore: Restore blob to the specified time. Required.
    :vartype time_to_restore: ~datetime.datetime
    :ivar blob_ranges: Blob ranges to restore. Required.
    :vartype blob_ranges: list[~azure.mgmt.storage.models.BlobRestoreRange]
    """

    # Point-in-time restore target; (de)serialized as RFC 3339.
    time_to_restore: datetime.datetime = rest_field(
        name="timeToRestore", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """Restore blob to the specified time. Required."""
    blob_ranges: list["_models.BlobRestoreRange"] = rest_field(
        name="blobRanges", visibility=["read", "create", "update", "delete", "query"]
    )
    """Blob ranges to restore. Required."""

    @overload
    def __init__(
        self,
        *,
        time_to_restore: datetime.datetime,
        blob_ranges: list["_models.BlobRestoreRange"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class BlobRestoreRange(_Model):
    """Blob range.

    :ivar start_range: Blob start range. This is inclusive. Empty means account start. Required.
    :vartype start_range: str
    :ivar end_range: Blob end range. This is exclusive. Empty means account end. Required.
    :vartype end_range: str
    """

    start_range: str = rest_field(name="startRange", visibility=["read", "create", "update", "delete", "query"])
    """Blob start range. This is inclusive. Empty means account start. Required."""
    end_range: str = rest_field(name="endRange", visibility=["read", "create", "update", "delete", "query"])
    """Blob end range. This is exclusive. Empty means account end. Required."""

    @overload
    def __init__(
        self,
        *,
        start_range: str,
        end_range: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class BlobRestoreStatus(_Model):
    """Blob restore status.

    :ivar status: The status of blob restore progress. Possible values are: - InProgress: Indicates
     that blob restore is ongoing. - Complete: Indicates that blob restore has been completed
     successfully. - Failed: Indicates that blob restore is failed. Known values are: "InProgress",
     "Complete", and "Failed".
    :vartype status: str or ~azure.mgmt.storage.models.BlobRestoreProgressStatus
    :ivar failure_reason: Failure reason when blob restore is failed.
    :vartype failure_reason: str
    :ivar restore_id: Id for tracking blob restore request.
+ :vartype restore_id: str + :ivar parameters: Blob restore request parameters. + :vartype parameters: ~azure.mgmt.storage.models.BlobRestoreParameters + """ + + status: Optional[Union[str, "_models.BlobRestoreProgressStatus"]] = rest_field(visibility=["read"]) + """The status of blob restore progress. Possible values are: - InProgress: Indicates that blob + restore is ongoing. - Complete: Indicates that blob restore has been completed successfully. - + Failed: Indicates that blob restore is failed. Known values are: \"InProgress\", \"Complete\", + and \"Failed\".""" + failure_reason: Optional[str] = rest_field(name="failureReason", visibility=["read"]) + """Failure reason when blob restore is failed.""" + restore_id: Optional[str] = rest_field(name="restoreId", visibility=["read"]) + """Id for tracking blob restore request.""" + parameters: Optional["_models.BlobRestoreParameters"] = rest_field(visibility=["read"]) + """Blob restore request parameters.""" + + +class BlobServiceProperties(ProxyResource): + """The properties of a storage account’s Blob service. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar blob_service_properties: The properties of a storage account’s Blob service. + :vartype blob_service_properties: ~azure.mgmt.storage.models.BlobServicePropertiesProperties + :ivar sku: Sku name and tier. 
+ :vartype sku: ~azure.mgmt.storage.models.Sku + """ + + blob_service_properties: Optional["_models.BlobServicePropertiesProperties"] = rest_field( + name="properties", visibility=["read", "create", "update", "delete", "query"] + ) + """The properties of a storage account’s Blob service.""" + sku: Optional["_models.Sku"] = rest_field(visibility=["read"]) + """Sku name and tier.""" + + __flattened_items = [ + "cors", + "default_service_version", + "delete_retention_policy", + "static_website", + "is_versioning_enabled", + "automatic_snapshot_policy_enabled", + "change_feed", + "restore_policy", + "container_delete_retention_policy", + "last_access_time_tracking_policy", + ] + + @overload + def __init__( + self, + *, + blob_service_properties: Optional["_models.BlobServicePropertiesProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.blob_service_properties is None: + return None + return getattr(self.blob_service_properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.blob_service_properties is None: + self.blob_service_properties = self._attr_to_rest_field["blob_service_properties"]._class_type() + setattr(self.blob_service_properties, key, value) + else: + super().__setattr__(key, value) + + +class BlobServicePropertiesProperties(_Model): + """The properties of a storage account’s Blob service. 
+ + :ivar cors: Specifies CORS rules for the Blob service. You can include up to five CorsRule + elements in the request. If no CorsRule elements are included in the request body, all CORS + rules will be deleted, and CORS will be disabled for the Blob service. + :vartype cors: ~azure.mgmt.storage.models.CorsRules + :ivar default_service_version: DefaultServiceVersion indicates the default version to use for + requests to the Blob service if an incoming request’s version is not specified. Possible values + include version 2008-10-27 and all more recent versions. + :vartype default_service_version: str + :ivar delete_retention_policy: The blob service properties for blob soft delete. + :vartype delete_retention_policy: ~azure.mgmt.storage.models.DeleteRetentionPolicy + :ivar static_website: The static website properties for blob storage. + :vartype static_website: ~azure.mgmt.storage.models.StaticWebsite + :ivar is_versioning_enabled: Versioning is enabled if set to true. + :vartype is_versioning_enabled: bool + :ivar automatic_snapshot_policy_enabled: Deprecated in favor of isVersioningEnabled property. + :vartype automatic_snapshot_policy_enabled: bool + :ivar change_feed: The blob service properties for change feed events. + :vartype change_feed: ~azure.mgmt.storage.models.ChangeFeed + :ivar restore_policy: The blob service properties for blob restore policy. + :vartype restore_policy: ~azure.mgmt.storage.models.RestorePolicyProperties + :ivar container_delete_retention_policy: The blob service properties for container soft delete. + :vartype container_delete_retention_policy: ~azure.mgmt.storage.models.DeleteRetentionPolicy + :ivar last_access_time_tracking_policy: The blob service property to configure last access time + based tracking policy. 
+ :vartype last_access_time_tracking_policy: + ~azure.mgmt.storage.models.LastAccessTimeTrackingPolicy + """ + + cors: Optional["_models.CorsRules"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Specifies CORS rules for the Blob service. You can include up to five CorsRule elements in the + request. If no CorsRule elements are included in the request body, all CORS rules will be + deleted, and CORS will be disabled for the Blob service.""" + default_service_version: Optional[str] = rest_field( + name="defaultServiceVersion", visibility=["read", "create", "update", "delete", "query"] + ) + """DefaultServiceVersion indicates the default version to use for requests to the Blob service if + an incoming request’s version is not specified. Possible values include version 2008-10-27 and + all more recent versions.""" + delete_retention_policy: Optional["_models.DeleteRetentionPolicy"] = rest_field( + name="deleteRetentionPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """The blob service properties for blob soft delete.""" + static_website: Optional["_models.StaticWebsite"] = rest_field( + name="staticWebsite", visibility=["read", "create", "update", "delete", "query"] + ) + """The static website properties for blob storage.""" + is_versioning_enabled: Optional[bool] = rest_field( + name="isVersioningEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Versioning is enabled if set to true.""" + automatic_snapshot_policy_enabled: Optional[bool] = rest_field( + name="automaticSnapshotPolicyEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Deprecated in favor of isVersioningEnabled property.""" + change_feed: Optional["_models.ChangeFeed"] = rest_field( + name="changeFeed", visibility=["read", "create", "update", "delete", "query"] + ) + """The blob service properties for change feed events.""" + restore_policy: Optional["_models.RestorePolicyProperties"] = 
rest_field( + name="restorePolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """The blob service properties for blob restore policy.""" + container_delete_retention_policy: Optional["_models.DeleteRetentionPolicy"] = rest_field( + name="containerDeleteRetentionPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """The blob service properties for container soft delete.""" + last_access_time_tracking_policy: Optional["_models.LastAccessTimeTrackingPolicy"] = rest_field( + name="lastAccessTimeTrackingPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """The blob service property to configure last access time based tracking policy.""" + + @overload + def __init__( + self, + *, + cors: Optional["_models.CorsRules"] = None, + default_service_version: Optional[str] = None, + delete_retention_policy: Optional["_models.DeleteRetentionPolicy"] = None, + static_website: Optional["_models.StaticWebsite"] = None, + is_versioning_enabled: Optional[bool] = None, + automatic_snapshot_policy_enabled: Optional[bool] = None, + change_feed: Optional["_models.ChangeFeed"] = None, + restore_policy: Optional["_models.RestorePolicyProperties"] = None, + container_delete_retention_policy: Optional["_models.DeleteRetentionPolicy"] = None, + last_access_time_tracking_policy: Optional["_models.LastAccessTimeTrackingPolicy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BurstingConstants(_Model): + """Constants used for calculating included burst IOPS and maximum burst credits for IOPS for a + file share in the storage account. + + :ivar burst_floor_iops: The guaranteed floor of burst IOPS for small file shares. 
+ :vartype burst_floor_iops: int + :ivar burst_io_scalar: The scalar against provisioned IOPS in the file share included burst + IOPS formula. + :vartype burst_io_scalar: float + :ivar burst_timeframe_seconds: The time frame for bursting in seconds in the file share maximum + burst credits for IOPS formula. + :vartype burst_timeframe_seconds: int + """ + + burst_floor_iops: Optional[int] = rest_field(name="burstFloorIOPS", visibility=["read"]) + """The guaranteed floor of burst IOPS for small file shares.""" + burst_io_scalar: Optional[float] = rest_field(name="burstIOScalar", visibility=["read"]) + """The scalar against provisioned IOPS in the file share included burst IOPS formula.""" + burst_timeframe_seconds: Optional[int] = rest_field(name="burstTimeframeSeconds", visibility=["read"]) + """The time frame for bursting in seconds in the file share maximum burst credits for IOPS + formula.""" + + +class ChangeFeed(_Model): + """The blob service properties for change feed events. + + :ivar enabled: Indicates whether change feed event logging is enabled for the Blob service. + :vartype enabled: bool + :ivar retention_in_days: Indicates the duration of changeFeed retention in days. Minimum value + is 1 day and maximum value is 146000 days (400 years). A null value indicates an infinite + retention of the change feed. + :vartype retention_in_days: int + """ + + enabled: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates whether change feed event logging is enabled for the Blob service.""" + retention_in_days: Optional[int] = rest_field( + name="retentionInDays", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates the duration of changeFeed retention in days. Minimum value is 1 day and maximum + value is 146000 days (400 years). 
A null value indicates an infinite retention of the change + feed.""" + + @overload + def __init__( + self, + *, + enabled: Optional[bool] = None, + retention_in_days: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CheckNameAvailabilityResult(_Model): + """The CheckNameAvailability operation response. + + :ivar name_available: Gets a boolean value that indicates whether the name is available for you + to use. If true, the name is available. If false, the name has already been taken or is invalid + and cannot be used. + :vartype name_available: bool + :ivar reason: Gets the reason that a storage account name could not be used. The Reason element + is only returned if NameAvailable is false. Known values are: "AccountNameInvalid" and + "AlreadyExists". + :vartype reason: str or ~azure.mgmt.storage.models.Reason + :ivar message: Gets an error message explaining the Reason value in more detail. + :vartype message: str + """ + + name_available: Optional[bool] = rest_field(name="nameAvailable", visibility=["read"]) + """Gets a boolean value that indicates whether the name is available for you to use. If true, the + name is available. If false, the name has already been taken or is invalid and cannot be used.""" + reason: Optional[Union[str, "_models.Reason"]] = rest_field(visibility=["read"]) + """Gets the reason that a storage account name could not be used. The Reason element is only + returned if NameAvailable is false. Known values are: \"AccountNameInvalid\" and + \"AlreadyExists\".""" + message: Optional[str] = rest_field(visibility=["read"]) + """Gets an error message explaining the Reason value in more detail.""" + + +class CloudError(_Model): + """An error response from the Storage service. 
+ + :ivar error: An error response from the Storage service. + :vartype error: ~azure.mgmt.storage.models.CloudErrorBody + """ + + error: Optional["_models.CloudErrorBody"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An error response from the Storage service.""" + + @overload + def __init__( + self, + *, + error: Optional["_models.CloudErrorBody"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CloudErrorBody(_Model): + """An error response from the Storage service. + + :ivar code: An identifier for the error. Codes are invariant and are intended to be consumed + programmatically. + :vartype code: str + :ivar message: A message describing the error, intended to be suitable for display in a user + interface. + :vartype message: str + :ivar target: The target of the particular error. For example, the name of the property in + error. + :vartype target: str + :ivar details: A list of additional details about the error. + :vartype details: list[~azure.mgmt.storage.models.CloudErrorBody] + """ + + code: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An identifier for the error. Codes are invariant and are intended to be consumed + programmatically.""" + message: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A message describing the error, intended to be suitable for display in a user interface.""" + target: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The target of the particular error. 
For example, the name of the property in error.""" + details: Optional[list["_models.CloudErrorBody"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A list of additional details about the error.""" + + @overload + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + target: Optional[str] = None, + details: Optional[list["_models.CloudErrorBody"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TrackedResource(ResourceAutoGenerated): + """Tracked Resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + """ + + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Resource tags.""" + location: str = rest_field(visibility=["read", "create"]) + """The geo-location where the resource lives. Required.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Connector(TrackedResource): + """A Connector is a tracked ARM resource modeled as a sub-resource of a Storage Account. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The properties of the Storage Connector. Required. + :vartype properties: ~azure.mgmt.storage.models.StorageConnectorProperties + """ + + properties: "_models.StorageConnectorProperties" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The properties of the Storage Connector. Required.""" + + @overload + def __init__( + self, + *, + location: str, + properties: "_models.StorageConnectorProperties", + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ContainerProperties(_Model): + """The properties of a container. + + :ivar version: The version of the deleted blob container. + :vartype version: str + :ivar deleted: Indicates whether the blob container was deleted. + :vartype deleted: bool + :ivar deleted_time: Blob container deletion time. + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: Remaining retention days for soft deleted blob container. + :vartype remaining_retention_days: int + :ivar default_encryption_scope: Default the container to use specified encryption scope for all + writes. + :vartype default_encryption_scope: str + :ivar deny_encryption_scope_override: Block override of encryption scope from the container + default. + :vartype deny_encryption_scope_override: bool + :ivar public_access: Specifies whether data in the container may be accessed publicly and the + level of access. Known values are: "Container", "Blob", and "None". + :vartype public_access: str or ~azure.mgmt.storage.models.PublicAccess + :ivar last_modified_time: Returns the date and time the container was last modified. + :vartype last_modified_time: ~datetime.datetime + :ivar lease_status: The lease status of the container. Known values are: "Locked" and + "Unlocked". + :vartype lease_status: str or ~azure.mgmt.storage.models.LeaseStatus + :ivar lease_state: Lease state of the container. Known values are: "Available", "Leased", + "Expired", "Breaking", and "Broken". + :vartype lease_state: str or ~azure.mgmt.storage.models.LeaseState + :ivar lease_duration: Specifies whether the lease on a container is of infinite or fixed + duration, only when the container is leased. Known values are: "Infinite" and "Fixed". 
+ :vartype lease_duration: str or ~azure.mgmt.storage.models.LeaseDuration + :ivar metadata: A name-value pair to associate with the container as metadata. + :vartype metadata: dict[str, str] + :ivar immutability_policy: The ImmutabilityPolicy property of the container. + :vartype immutability_policy: ~azure.mgmt.storage.models.ImmutabilityPolicyProperties + :ivar legal_hold: The LegalHold property of the container. + :vartype legal_hold: ~azure.mgmt.storage.models.LegalHoldProperties + :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there are at + least one existing tag. The hasLegalHold public property is set to false by SRP if all existing + legal hold tags are cleared out. There can be a maximum of 1000 blob containers with + hasLegalHold=true for a given account. + :vartype has_legal_hold: bool + :ivar has_immutability_policy: The hasImmutabilityPolicy public property is set to true by SRP + if ImmutabilityPolicy has been created for this container. The hasImmutabilityPolicy public + property is set to false by SRP if ImmutabilityPolicy has not been created for this container. + :vartype has_immutability_policy: bool + :ivar immutable_storage_with_versioning: The object level immutability property of the + container. The property is immutable and can only be set to true at the container creation + time. Existing containers must undergo a migration process. + :vartype immutable_storage_with_versioning: + ~azure.mgmt.storage.models.ImmutableStorageWithVersioning + :ivar enable_nfs_v3_root_squash: Enable NFSv3 root squash on blob container. + :vartype enable_nfs_v3_root_squash: bool + :ivar enable_nfs_v3_all_squash: Enable NFSv3 all squash on blob container. 
+ :vartype enable_nfs_v3_all_squash: bool + """ + + version: Optional[str] = rest_field(visibility=["read"]) + """The version of the deleted blob container.""" + deleted: Optional[bool] = rest_field(visibility=["read"]) + """Indicates whether the blob container was deleted.""" + deleted_time: Optional[datetime.datetime] = rest_field(name="deletedTime", visibility=["read"], format="rfc3339") + """Blob container deletion time.""" + remaining_retention_days: Optional[int] = rest_field(name="remainingRetentionDays", visibility=["read"]) + """Remaining retention days for soft deleted blob container.""" + default_encryption_scope: Optional[str] = rest_field( + name="defaultEncryptionScope", visibility=["read", "create", "update", "delete", "query"] + ) + """Default the container to use specified encryption scope for all writes.""" + deny_encryption_scope_override: Optional[bool] = rest_field( + name="denyEncryptionScopeOverride", visibility=["read", "create", "update", "delete", "query"] + ) + """Block override of encryption scope from the container default.""" + public_access: Optional[Union[str, "_models.PublicAccess"]] = rest_field( + name="publicAccess", visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies whether data in the container may be accessed publicly and the level of access. Known + values are: \"Container\", \"Blob\", and \"None\".""" + last_modified_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedTime", visibility=["read"], format="rfc3339" + ) + """Returns the date and time the container was last modified.""" + lease_status: Optional[Union[str, "_models.LeaseStatus"]] = rest_field(name="leaseStatus", visibility=["read"]) + """The lease status of the container. Known values are: \"Locked\" and \"Unlocked\".""" + lease_state: Optional[Union[str, "_models.LeaseState"]] = rest_field(name="leaseState", visibility=["read"]) + """Lease state of the container. 
Known values are: \"Available\", \"Leased\", \"Expired\", + \"Breaking\", and \"Broken\".""" + lease_duration: Optional[Union[str, "_models.LeaseDuration"]] = rest_field( + name="leaseDuration", visibility=["read"] + ) + """Specifies whether the lease on a container is of infinite or fixed duration, only when the + container is leased. Known values are: \"Infinite\" and \"Fixed\".""" + metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A name-value pair to associate with the container as metadata.""" + immutability_policy: Optional["_models.ImmutabilityPolicyProperties"] = rest_field( + name="immutabilityPolicy", visibility=["read"] + ) + """The ImmutabilityPolicy property of the container.""" + legal_hold: Optional["_models.LegalHoldProperties"] = rest_field(name="legalHold", visibility=["read"]) + """The LegalHold property of the container.""" + has_legal_hold: Optional[bool] = rest_field(name="hasLegalHold", visibility=["read"]) + """The hasLegalHold public property is set to true by SRP if there are at least one existing tag. + The hasLegalHold public property is set to false by SRP if all existing legal hold tags are + cleared out. There can be a maximum of 1000 blob containers with hasLegalHold=true for a given + account.""" + has_immutability_policy: Optional[bool] = rest_field(name="hasImmutabilityPolicy", visibility=["read"]) + """The hasImmutabilityPolicy public property is set to true by SRP if ImmutabilityPolicy has been + created for this container. The hasImmutabilityPolicy public property is set to false by SRP if + ImmutabilityPolicy has not been created for this container.""" + immutable_storage_with_versioning: Optional["_models.ImmutableStorageWithVersioning"] = rest_field( + name="immutableStorageWithVersioning", visibility=["read", "create", "update", "delete", "query"] + ) + """The object level immutability property of the container. 
The property is immutable and can only + be set to true at the container creation time. Existing containers must undergo a migration + process.""" + enable_nfs_v3_root_squash: Optional[bool] = rest_field( + name="enableNfsV3RootSquash", visibility=["read", "create", "update", "delete", "query"] + ) + """Enable NFSv3 root squash on blob container.""" + enable_nfs_v3_all_squash: Optional[bool] = rest_field( + name="enableNfsV3AllSquash", visibility=["read", "create", "update", "delete", "query"] + ) + """Enable NFSv3 all squash on blob container.""" + + @overload + def __init__( + self, + *, + default_encryption_scope: Optional[str] = None, + deny_encryption_scope_override: Optional[bool] = None, + public_access: Optional[Union[str, "_models.PublicAccess"]] = None, + metadata: Optional[dict[str, str]] = None, + immutable_storage_with_versioning: Optional["_models.ImmutableStorageWithVersioning"] = None, + enable_nfs_v3_root_squash: Optional[bool] = None, + enable_nfs_v3_all_squash: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CorsRule(_Model): + """Specifies a CORS rule for the Blob service. + + :ivar allowed_origins: Required if CorsRule element is present. A list of origin domains that + will be allowed via CORS, or "*" to allow all domains. Required. + :vartype allowed_origins: list[str] + :ivar allowed_methods: Required if CorsRule element is present. A list of HTTP methods that are + allowed to be executed by the origin. Required. + :vartype allowed_methods: list[str or ~azure.mgmt.storage.models.AllowedMethods] + :ivar max_age_in_seconds: Required if CorsRule element is present. The number of seconds that + the client/browser should cache a preflight response. Required. 
+ :vartype max_age_in_seconds: int + :ivar exposed_headers: Required if CorsRule element is present. A list of response headers to + expose to CORS clients. Required. + :vartype exposed_headers: list[str] + :ivar allowed_headers: Required if CorsRule element is present. A list of headers allowed to be + part of the cross-origin request. Required. + :vartype allowed_headers: list[str] + """ + + allowed_origins: list[str] = rest_field( + name="allowedOrigins", visibility=["read", "create", "update", "delete", "query"] + ) + """Required if CorsRule element is present. A list of origin domains that will be allowed via + CORS, or \"*\" to allow all domains. Required.""" + allowed_methods: list[Union[str, "_models.AllowedMethods"]] = rest_field( + name="allowedMethods", visibility=["read", "create", "update", "delete", "query"] + ) + """Required if CorsRule element is present. A list of HTTP methods that are allowed to be executed + by the origin. Required.""" + max_age_in_seconds: int = rest_field( + name="maxAgeInSeconds", visibility=["read", "create", "update", "delete", "query"] + ) + """Required if CorsRule element is present. The number of seconds that the client/browser should + cache a preflight response. Required.""" + exposed_headers: list[str] = rest_field( + name="exposedHeaders", visibility=["read", "create", "update", "delete", "query"] + ) + """Required if CorsRule element is present. A list of response headers to expose to CORS clients. + Required.""" + allowed_headers: list[str] = rest_field( + name="allowedHeaders", visibility=["read", "create", "update", "delete", "query"] + ) + """Required if CorsRule element is present. A list of headers allowed to be part of the + cross-origin request. Required.""" + + @overload + def __init__( + self, + *, + allowed_origins: list[str], + allowed_methods: list[Union[str, "_models.AllowedMethods"]], + max_age_in_seconds: int, + exposed_headers: list[str], + allowed_headers: list[str], + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CorsRules(_Model): + """Sets the CORS rules. You can include up to five CorsRule elements in the request. + + :ivar cors_rules: The List of CORS rules. You can include up to five CorsRule elements in the + request. + :vartype cors_rules: list[~azure.mgmt.storage.models.CorsRule] + """ + + cors_rules: Optional[list["_models.CorsRule"]] = rest_field( + name="corsRules", visibility=["read", "create", "update", "delete", "query"] + ) + """The List of CORS rules. You can include up to five CorsRule elements in the request.""" + + @overload + def __init__( + self, + *, + cors_rules: Optional[list["_models.CorsRule"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CustomDomain(_Model): + """The custom domain assigned to this storage account. This can be set via Update. + + :ivar name: Gets or sets the custom domain name assigned to the storage account. Name is the + CNAME source. Required. + :vartype name: str + :ivar use_sub_domain_name: Indicates whether indirect CName validation is enabled. Default + value is false. This should only be set on updates. + :vartype use_sub_domain_name: bool + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Gets or sets the custom domain name assigned to the storage account. Name is the CNAME source. 
    Required."""
    use_sub_domain_name: Optional[bool] = rest_field(
        name="useSubDomainName", visibility=["read", "create", "update", "delete", "query"]
    )
    """Indicates whether indirect CNAME validation is enabled. Default value is false. This should
    only be set on updates."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        use_sub_domain_name: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class DataShare(TrackedResource):
    """A DataShare is a tracked ARM resource modeled as a sub-resource of a Storage Account.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.storage.models.SystemData
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar location: The geo-location where the resource lives. Required.
    :vartype location: str
    :ivar properties: The properties of the Storage DataShare. Required.
    :vartype properties: ~azure.mgmt.storage.models.StorageDataShareProperties
    """

    properties: "_models.StorageDataShareProperties" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The properties of the Storage DataShare.
    Required."""

    @overload
    def __init__(
        self,
        *,
        location: str,
        properties: "_models.StorageDataShareProperties",
        tags: Optional[dict[str, str]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class StorageConnectorConnection(_Model):
    """The connection properties of the backing data source.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    DataShareConnection

    :ivar type: Type of the connection. Controls the type of the connection object. Not mutable
     once the Storage Connector is created. Required. "DataShare"
    :vartype type: str or ~azure.mgmt.storage.models.StorageConnectorConnectionType
    """

    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Type of the connection. Controls the type of the connection object. Not mutable once the
    Storage Connector is created. Required. \"DataShare\""""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class DataShareConnection(StorageConnectorConnection, discriminator="DataShare"):
    """The connection details for Data Share source.

    :ivar type: The type for data share connection. Required. DataShare connection type.
    :vartype type: str or ~azure.mgmt.storage.models.DATA_SHARE
    :ivar data_share_uri: The URI of the backing DataShare. Must be in the format:
     azds://::. Required.
    :vartype data_share_uri: str
    """

    type: Literal[StorageConnectorConnectionType.DATA_SHARE] = rest_discriminator(name="type", visibility=["read"])  # type: ignore
    """The type for data share connection. Required. DataShare connection type."""
    # NOTE(review): the URI format documented below reads "azds://::" — the placeholder
    # segments appear to have been stripped during doc generation; confirm the full
    # format against the Storage service specification.
    data_share_uri: str = rest_field(name="dataShareUri", visibility=["read", "create", "update", "delete", "query"])
    """The URI of the backing DataShare. Must be in the format:
    azds://::. Required."""

    @overload
    def __init__(
        self,
        *,
        data_share_uri: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.type = StorageConnectorConnectionType.DATA_SHARE  # type: ignore


class StorageConnectorSource(_Model):
    """The storage connector backing data source information.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    DataShareSource

    :ivar type: Type of the Storage Connector. Not mutable once the Storage Connector is created.
     Required. "DataShare"
    :vartype type: str or ~azure.mgmt.storage.models.StorageConnectorSourceType
    """

    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Type of the Storage Connector. Not mutable once the Storage Connector is created. Required.
    \"DataShare\""""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class DataShareSource(StorageConnectorSource, discriminator="DataShare"):
    """The properties of data share source.

    :ivar type: The type for data share source. Required. Source type - DataShare.
    :vartype type: str or ~azure.mgmt.storage.models.DATA_SHARE
    :ivar connection: Details for how to connect to the backing data store. Required.
    :vartype connection: ~azure.mgmt.storage.models.StorageConnectorConnection
    :ivar auth_properties: Details for how to authenticate to the backing data store. Required.
    :vartype auth_properties: ~azure.mgmt.storage.models.StorageConnectorAuthProperties
    """

    type: Literal[StorageConnectorSourceType.DATA_SHARE] = rest_discriminator(name="type", visibility=["read"])  # type: ignore
    """The type for data share source. Required. Source type - DataShare."""
    connection: "_models.StorageConnectorConnection" = rest_field(visibility=["read", "create"])
    """Details for how to connect to the backing data store. Required."""
    auth_properties: "_models.StorageConnectorAuthProperties" = rest_field(
        name="authProperties", visibility=["read", "create", "update", "delete", "query"]
    )
    """Details for how to authenticate to the backing data store. Required."""

    @overload
    def __init__(
        self,
        *,
        connection: "_models.StorageConnectorConnection",
        auth_properties: "_models.StorageConnectorAuthProperties",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.type = StorageConnectorSourceType.DATA_SHARE  # type: ignore


class DateAfterCreation(_Model):
    """Object to define snapshot and version action conditions.

    :ivar days_after_creation_greater_than: Value indicating the age in days after creation.
     Required.
    :vartype days_after_creation_greater_than: float
    :ivar days_after_last_tier_change_greater_than: Value indicating the age in days after last
     blob tier change time. This property is only applicable for tierToArchive actions and requires
     daysAfterCreationGreaterThan to be set for snapshots and blob-version-based actions. The blob
     will be archived if both the conditions are satisfied.
    :vartype days_after_last_tier_change_greater_than: float
    """

    days_after_creation_greater_than: float = rest_field(
        name="daysAfterCreationGreaterThan", visibility=["read", "create", "update", "delete", "query"]
    )
    """Value indicating the age in days after creation. Required."""
    days_after_last_tier_change_greater_than: Optional[float] = rest_field(
        name="daysAfterLastTierChangeGreaterThan", visibility=["read", "create", "update", "delete", "query"]
    )
    """Value indicating the age in days after last blob tier change time. This property is only
    applicable for tierToArchive actions and requires daysAfterCreationGreaterThan to be set for
    snapshots and blob-version-based actions. The blob will be archived if both the conditions are
    satisfied."""

    @overload
    def __init__(
        self,
        *,
        days_after_creation_greater_than: float,
        days_after_last_tier_change_greater_than: Optional[float] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class DateAfterModification(_Model):
    """Object to define the base blob action conditions. Properties daysAfterModificationGreaterThan,
    daysAfterLastAccessTimeGreaterThan and daysAfterCreationGreaterThan are mutually exclusive. The
    daysAfterLastTierChangeGreaterThan property is only applicable for tierToArchive actions which
    requires daysAfterModificationGreaterThan to be set, also it cannot be used in conjunction with
    daysAfterLastAccessTimeGreaterThan or daysAfterCreationGreaterThan.

    :ivar days_after_modification_greater_than: Value indicating the age in days after last
     modification.
    :vartype days_after_modification_greater_than: float
    :ivar days_after_last_access_time_greater_than: Value indicating the age in days after last
     blob access. This property can only be used in conjunction with last access time tracking
     policy.
    :vartype days_after_last_access_time_greater_than: float
    :ivar days_after_last_tier_change_greater_than: Value indicating the age in days after last
     blob tier change time. This property is only applicable for tierToArchive actions and requires
     daysAfterModificationGreaterThan to be set for baseBlobs-based actions. The blob will be
     archived if both the conditions are satisfied.
    :vartype days_after_last_tier_change_greater_than: float
    :ivar days_after_creation_greater_than: Value indicating the age in days after blob creation.
    :vartype days_after_creation_greater_than: float
    """

    days_after_modification_greater_than: Optional[float] = rest_field(
        name="daysAfterModificationGreaterThan", visibility=["read", "create", "update", "delete", "query"]
    )
    """Value indicating the age in days after last modification."""
    days_after_last_access_time_greater_than: Optional[float] = rest_field(
        name="daysAfterLastAccessTimeGreaterThan", visibility=["read", "create", "update", "delete", "query"]
    )
    """Value indicating the age in days after last blob access. This property can only be used in
    conjunction with last access time tracking policy."""
    days_after_last_tier_change_greater_than: Optional[float] = rest_field(
        name="daysAfterLastTierChangeGreaterThan", visibility=["read", "create", "update", "delete", "query"]
    )
    """Value indicating the age in days after last blob tier change time. This property is only
    applicable for tierToArchive actions and requires daysAfterModificationGreaterThan to be set
    for baseBlobs-based actions. The blob will be archived if both the conditions are satisfied."""
    days_after_creation_greater_than: Optional[float] = rest_field(
        name="daysAfterCreationGreaterThan", visibility=["read", "create", "update", "delete", "query"]
    )
    """Value indicating the age in days after blob creation."""

    @overload
    def __init__(
        self,
        *,
        days_after_modification_greater_than: Optional[float] = None,
        days_after_last_access_time_greater_than: Optional[float] = None,
        days_after_last_tier_change_greater_than: Optional[float] = None,
        days_after_creation_greater_than: Optional[float] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class DeletedAccount(ProxyResource):
    """Deleted storage account.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.storage.models.SystemData
    :ivar properties: Properties of the deleted account.
    :vartype properties: ~azure.mgmt.storage.models.DeletedAccountProperties
    """

    properties: Optional["_models.DeletedAccountProperties"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Properties of the deleted account."""

    # Attribute names that are "flattened" onto this model for convenience but
    # actually live on the nested `properties` sub-model (see __getattr__/__setattr__).
    __flattened_items = [
        "storage_account_resource_id",
        "location",
        "restore_reference",
        "creation_time",
        "deletion_time",
    ]

    @overload
    def __init__(
        self,
        *,
        properties: Optional["_models.DeletedAccountProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Pull flattened keyword arguments out before the base-model __init__ runs,
        # then re-apply them via __setattr__ so they land on the nested `properties`.
        _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}
        super().__init__(*args, **kwargs)
        for k, v in _flattened_input.items():
            setattr(self, k, v)

    def __getattr__(self, name: str) -> Any:
        # Delegate reads of flattened names to the nested `properties` model;
        # returns None when `properties` has not been populated.
        if name in self.__flattened_items:
            if self.properties is None:
                return None
            return getattr(self.properties, name)
        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")

    def __setattr__(self, key: str, value: Any) -> None:
        # Route writes of flattened names to the nested `properties` model,
        # lazily constructing it on first write.
        if key in self.__flattened_items:
            if self.properties is None:
                self.properties = self._attr_to_rest_field["properties"]._class_type()
            setattr(self.properties, key, value)
        else:
            super().__setattr__(key, value)


class DeletedAccountProperties(_Model):
    """Attributes of a deleted storage account.

    :ivar storage_account_resource_id: Full resource id of the original storage account.
    :vartype storage_account_resource_id: str
    :ivar location: Location of the deleted account.
    :vartype location: str
    :ivar restore_reference: Can be used to attempt recovering this deleted account via
     PutStorageAccount API.
    :vartype restore_reference: str
    :ivar creation_time: Creation time of the deleted account.
    :vartype creation_time: str
    :ivar deletion_time: Deletion time of the deleted account.
    :vartype deletion_time: str
    """

    storage_account_resource_id: Optional[str] = rest_field(name="storageAccountResourceId", visibility=["read"])
    """Full resource id of the original storage account."""
    location: Optional[str] = rest_field(visibility=["read"])
    """Location of the deleted account."""
    restore_reference: Optional[str] = rest_field(name="restoreReference", visibility=["read"])
    """Can be used to attempt recovering this deleted account via PutStorageAccount API."""
    creation_time: Optional[str] = rest_field(name="creationTime", visibility=["read"])
    """Creation time of the deleted account."""
    deletion_time: Optional[str] = rest_field(name="deletionTime", visibility=["read"])
    """Deletion time of the deleted account."""


class DeletedShare(_Model):
    """The deleted share to be restored.

    :ivar deleted_share_name: Required. Identify the name of the deleted share that will be
     restored. Required.
    :vartype deleted_share_name: str
    :ivar deleted_share_version: Required. Identify the version of the deleted share that will be
     restored. Required.
    :vartype deleted_share_version: str
    """

    deleted_share_name: str = rest_field(
        name="deletedShareName", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. Identify the name of the deleted share that will be restored. Required."""
    deleted_share_version: str = rest_field(
        name="deletedShareVersion", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. Identify the version of the deleted share that will be restored.
    Required."""

    @overload
    def __init__(
        self,
        *,
        deleted_share_name: str,
        deleted_share_version: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class DeleteRetentionPolicy(_Model):
    """The service properties for soft delete.

    :ivar enabled: Indicates whether DeleteRetentionPolicy is enabled.
    :vartype enabled: bool
    :ivar days: Indicates the number of days that the deleted item should be retained. The minimum
     specified value can be 1 and the maximum value can be 365.
    :vartype days: int
    :ivar allow_permanent_delete: This property when set to true allows deletion of the soft
     deleted blob versions and snapshots. This property cannot be used with blob restore policy.
     This property only applies to blob service and does not apply to containers or file share.
    :vartype allow_permanent_delete: bool
    """

    enabled: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Indicates whether DeleteRetentionPolicy is enabled."""
    days: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Indicates the number of days that the deleted item should be retained. The minimum specified
    value can be 1 and the maximum value can be 365."""
    allow_permanent_delete: Optional[bool] = rest_field(
        name="allowPermanentDelete", visibility=["read", "create", "update", "delete", "query"]
    )
    """This property when set to true allows deletion of the soft deleted blob versions and snapshots.
    This property cannot be used with blob restore policy. This property only applies to blob service
    and does not apply to containers or file share."""

    @overload
    def __init__(
        self,
        *,
        enabled: Optional[bool] = None,
        days: Optional[int] = None,
        allow_permanent_delete: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class Dimension(_Model):
    """Dimension of blobs, possibly be blob type or access tier.

    :ivar name: Display name of dimension.
    :vartype name: str
    :ivar display_name: Display name of dimension.
    :vartype display_name: str
    """

    name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Display name of dimension."""
    display_name: Optional[str] = rest_field(
        name="displayName", visibility=["read", "create", "update", "delete", "query"]
    )
    """Display name of dimension."""

    @overload
    def __init__(
        self,
        *,
        name: Optional[str] = None,
        display_name: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class DualStackEndpointPreference(_Model):
    """Dual-stack endpoint preference defines whether IPv6 endpoints are going to be published.

    :ivar publish_ipv6_endpoint: A boolean flag which indicates whether IPv6 storage endpoints are
     to be published.
    :vartype publish_ipv6_endpoint: bool
    """

    publish_ipv6_endpoint: Optional[bool] = rest_field(
        name="publishIpv6Endpoint", visibility=["read", "create", "update", "delete", "query"]
    )
    """A boolean flag which indicates whether IPv6 storage endpoints are to be published."""

    @overload
    def __init__(
        self,
        *,
        publish_ipv6_endpoint: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class Encryption(_Model):
    """The encryption settings on the storage account.

    :ivar services: List of services which support encryption.
    :vartype services: ~azure.mgmt.storage.models.EncryptionServices
    :ivar key_source: The encryption keySource (provider). Possible values (case-insensitive):
     Microsoft.Storage, Microsoft.Keyvault. Known values are: "Microsoft.Storage" and
     "Microsoft.Keyvault".
    :vartype key_source: str or ~azure.mgmt.storage.models.KeySource
    :ivar require_infrastructure_encryption: A boolean indicating whether or not the service
     applies a secondary layer of encryption with platform managed keys for data at rest.
    :vartype require_infrastructure_encryption: bool
    :ivar key_vault_properties: Properties provided by key vault.
    :vartype key_vault_properties: ~azure.mgmt.storage.models.KeyVaultProperties
    :ivar encryption_identity: The identity to be used with service-side encryption at rest.
    :vartype encryption_identity: ~azure.mgmt.storage.models.EncryptionIdentity
    """

    services: Optional["_models.EncryptionServices"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """List of services which support encryption."""
    key_source: Optional[Union[str, "_models.KeySource"]] = rest_field(
        name="keySource", visibility=["read", "create", "update", "delete", "query"]
    )
    """The encryption keySource (provider). Possible values (case-insensitive): Microsoft.Storage,
    Microsoft.Keyvault. Known values are: \"Microsoft.Storage\" and \"Microsoft.Keyvault\"."""
    require_infrastructure_encryption: Optional[bool] = rest_field(
        name="requireInfrastructureEncryption", visibility=["read", "create", "update", "delete", "query"]
    )
    """A boolean indicating whether or not the service applies a secondary layer of encryption with
    platform managed keys for data at rest."""
    # NOTE: the wire name is the all-lowercase "keyvaultproperties" — this matches
    # the service contract and differs from the camelCase used by sibling fields.
    key_vault_properties: Optional["_models.KeyVaultProperties"] = rest_field(
        name="keyvaultproperties", visibility=["read", "create", "update", "delete", "query"]
    )
    """Properties provided by key vault."""
    # NOTE: serialized under the wire name "identity", not "encryptionIdentity".
    encryption_identity: Optional["_models.EncryptionIdentity"] = rest_field(
        name="identity", visibility=["read", "create", "update", "delete", "query"]
    )
    """The identity to be used with service-side encryption at rest."""

    @overload
    def __init__(
        self,
        *,
        services: Optional["_models.EncryptionServices"] = None,
        key_source: Optional[Union[str, "_models.KeySource"]] = None,
        require_infrastructure_encryption: Optional[bool] = None,
        key_vault_properties: Optional["_models.KeyVaultProperties"] = None,
        encryption_identity: Optional["_models.EncryptionIdentity"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class EncryptionIdentity(_Model):
    """Encryption identity for the storage account.

    :ivar encryption_user_assigned_identity: Resource identifier of the UserAssigned identity to be
     associated with server-side encryption on the storage account.
    :vartype encryption_user_assigned_identity: str
    :ivar encryption_federated_identity_client_id: ClientId of the multi-tenant application to be
     used in conjunction with the user-assigned identity for cross-tenant customer-managed-keys
     server-side encryption on the storage account.
    :vartype encryption_federated_identity_client_id: str
    """

    encryption_user_assigned_identity: Optional[str] = rest_field(
        name="userAssignedIdentity", visibility=["read", "create", "update", "delete", "query"]
    )
    """Resource identifier of the UserAssigned identity to be associated with server-side encryption
    on the storage account."""
    encryption_federated_identity_client_id: Optional[str] = rest_field(
        name="federatedIdentityClientId", visibility=["read", "create", "update", "delete", "query"]
    )
    """ClientId of the multi-tenant application to be used in conjunction with the user-assigned
    identity for cross-tenant customer-managed-keys server-side encryption on the storage account."""

    @overload
    def __init__(
        self,
        *,
        encryption_user_assigned_identity: Optional[str] = None,
        encryption_federated_identity_client_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class EncryptionInTransit(_Model):
    """Encryption in transit setting.

    :ivar required: Indicates whether encryption in transit is required.
    :vartype required: bool
    """

    required: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Indicates whether encryption in transit is required."""

    @overload
    def __init__(
        self,
        *,
        required: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class EncryptionScope(ProxyResource):
    """The Encryption Scope resource.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.storage.models.SystemData
    :ivar encryption_scope_properties: Properties of the encryption scope.
    :vartype encryption_scope_properties: ~azure.mgmt.storage.models.EncryptionScopeProperties
    """

    encryption_scope_properties: Optional["_models.EncryptionScopeProperties"] = rest_field(
        name="properties", visibility=["read", "create", "update", "delete", "query"]
    )
    """Properties of the encryption scope."""

    # Attribute names that are "flattened" onto this model for convenience but
    # actually live on the nested `encryption_scope_properties` sub-model
    # (see __getattr__/__setattr__).
    __flattened_items = [
        "source",
        "state",
        "creation_time",
        "last_modified_time",
        "key_vault_properties",
        "require_infrastructure_encryption",
    ]

    @overload
    def __init__(
        self,
        *,
        encryption_scope_properties: Optional["_models.EncryptionScopeProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Pull flattened keyword arguments out before the base-model __init__ runs,
        # then re-apply them via __setattr__ so they land on the nested sub-model.
        _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}
        super().__init__(*args, **kwargs)
        for k, v in _flattened_input.items():
            setattr(self, k, v)

    def __getattr__(self, name: str) -> Any:
        # Delegate reads of flattened names to the nested sub-model; returns None
        # when `encryption_scope_properties` has not been populated.
        if name in self.__flattened_items:
            if self.encryption_scope_properties is None:
                return None
            return getattr(self.encryption_scope_properties, name)
        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")

    def __setattr__(self, key: str, value: Any) -> None:
        # Route writes of flattened names to the nested sub-model, lazily
        # constructing it on first write.
        if key in self.__flattened_items:
            if self.encryption_scope_properties is None:
                self.encryption_scope_properties = self._attr_to_rest_field["encryption_scope_properties"]._class_type()
            setattr(self.encryption_scope_properties, key, value)
        else:
            super().__setattr__(key, value)


class EncryptionScopeKeyVaultProperties(_Model):
    """The key vault properties for the encryption scope. This is a required field if encryption scope
    'source' attribute is set to 'Microsoft.KeyVault'.

    :ivar key_uri: The object identifier for a key vault key object. When applied, the encryption
     scope will use the key referenced by the identifier to enable customer-managed key support on
     this encryption scope.
    :vartype key_uri: str
    :ivar current_versioned_key_identifier: The object identifier of the current versioned Key
     Vault Key in use.
    :vartype current_versioned_key_identifier: str
    :ivar last_key_rotation_timestamp: Timestamp of last rotation of the Key Vault Key.
    :vartype last_key_rotation_timestamp: ~datetime.datetime
    """

    key_uri: Optional[str] = rest_field(name="keyUri", visibility=["read", "create", "update", "delete", "query"])
    """The object identifier for a key vault key object.
    When applied, the encryption scope will use
    the key referenced by the identifier to enable customer-managed key support on this encryption
    scope."""
    current_versioned_key_identifier: Optional[str] = rest_field(
        name="currentVersionedKeyIdentifier", visibility=["read"]
    )
    """The object identifier of the current versioned Key Vault Key in use."""
    last_key_rotation_timestamp: Optional[datetime.datetime] = rest_field(
        name="lastKeyRotationTimestamp", visibility=["read"], format="rfc3339"
    )
    """Timestamp of last rotation of the Key Vault Key."""

    @overload
    def __init__(
        self,
        *,
        key_uri: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class EncryptionScopeProperties(_Model):
    """Properties of the encryption scope.

    :ivar source: The provider for the encryption scope. Possible values (case-insensitive):
     Microsoft.Storage, Microsoft.KeyVault. Known values are: "Microsoft.Storage" and
     "Microsoft.KeyVault".
    :vartype source: str or ~azure.mgmt.storage.models.EncryptionScopeSource
    :ivar state: The state of the encryption scope. Possible values (case-insensitive): Enabled,
     Disabled. Known values are: "Enabled" and "Disabled".
    :vartype state: str or ~azure.mgmt.storage.models.EncryptionScopeState
    :ivar creation_time: Gets the creation date and time of the encryption scope in UTC.
    :vartype creation_time: ~datetime.datetime
    :ivar last_modified_time: Gets the last modification date and time of the encryption scope in
     UTC.
    :vartype last_modified_time: ~datetime.datetime
    :ivar key_vault_properties: The key vault properties for the encryption scope. This is a
     required field if encryption scope 'source' attribute is set to 'Microsoft.KeyVault'.
    :vartype key_vault_properties: ~azure.mgmt.storage.models.EncryptionScopeKeyVaultProperties
    :ivar require_infrastructure_encryption: A boolean indicating whether or not the service
     applies a secondary layer of encryption with platform managed keys for data at rest.
    :vartype require_infrastructure_encryption: bool
    """

    source: Optional[Union[str, "_models.EncryptionScopeSource"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The provider for the encryption scope. Possible values (case-insensitive): Microsoft.Storage,
    Microsoft.KeyVault. Known values are: \"Microsoft.Storage\" and \"Microsoft.KeyVault\"."""
    state: Optional[Union[str, "_models.EncryptionScopeState"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The state of the encryption scope. Possible values (case-insensitive): Enabled, Disabled.
    Known values are: \"Enabled\" and \"Disabled\"."""
    # creation_time / last_modified_time are service-populated (visibility "read" only).
    creation_time: Optional[datetime.datetime] = rest_field(name="creationTime", visibility=["read"], format="rfc3339")
    """Gets the creation date and time of the encryption scope in UTC."""
    last_modified_time: Optional[datetime.datetime] = rest_field(
        name="lastModifiedTime", visibility=["read"], format="rfc3339"
    )
    """Gets the last modification date and time of the encryption scope in UTC."""
    key_vault_properties: Optional["_models.EncryptionScopeKeyVaultProperties"] = rest_field(
        name="keyVaultProperties", visibility=["read", "create", "update", "delete", "query"]
    )
    """The key vault properties for the encryption scope. This is a required field if encryption scope
    'source' attribute is set to 'Microsoft.KeyVault'."""
    require_infrastructure_encryption: Optional[bool] = rest_field(
        name="requireInfrastructureEncryption", visibility=["read", "create", "update", "delete", "query"]
    )
    """A boolean indicating whether or not the service applies a secondary layer of encryption with
    platform managed keys for data at rest."""

    @overload
    def __init__(
        self,
        *,
        source: Optional[Union[str, "_models.EncryptionScopeSource"]] = None,
        state: Optional[Union[str, "_models.EncryptionScopeState"]] = None,
        key_vault_properties: Optional["_models.EncryptionScopeKeyVaultProperties"] = None,
        require_infrastructure_encryption: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class EncryptionService(_Model):
    """A service that allows server-side encryption to be used.

    :ivar enabled: A boolean indicating whether or not the service encrypts the data as it is
     stored. Encryption at rest is enabled by default today and cannot be disabled.
    :vartype enabled: bool
    :ivar last_enabled_time: Gets a rough estimate of the date/time when the encryption was last
     enabled by the user. Data is encrypted at rest by default today and cannot be disabled.
    :vartype last_enabled_time: ~datetime.datetime
    :ivar key_type: Encryption key type to be used for the encryption service. 'Account' key type
     implies that an account-scoped encryption key will be used. 'Service' key type implies that a
     default service key is used. Known values are: "Service" and "Account".
+ :vartype key_type: str or ~azure.mgmt.storage.models.KeyType + """ + + enabled: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A boolean indicating whether or not the service encrypts the data as it is stored. Encryption + at rest is enabled by default today and cannot be disabled.""" + last_enabled_time: Optional[datetime.datetime] = rest_field( + name="lastEnabledTime", visibility=["read"], format="rfc3339" + ) + """Gets a rough estimate of the date/time when the encryption was last enabled by the user. Data + is encrypted at rest by default today and cannot be disabled.""" + key_type: Optional[Union[str, "_models.KeyType"]] = rest_field(name="keyType", visibility=["read", "create"]) + """Encryption key type to be used for the encryption service. 'Account' key type implies that an + account-scoped encryption key will be used. 'Service' key type implies that a default service + key is used. Known values are: \"Service\" and \"Account\".""" + + @overload + def __init__( + self, + *, + enabled: Optional[bool] = None, + key_type: Optional[Union[str, "_models.KeyType"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EncryptionServices(_Model): + """A list of services that support encryption. + + :ivar blob: The encryption function of the blob storage service. + :vartype blob: ~azure.mgmt.storage.models.EncryptionService + :ivar file: The encryption function of the file storage service. + :vartype file: ~azure.mgmt.storage.models.EncryptionService + :ivar table: The encryption function of the table storage service. + :vartype table: ~azure.mgmt.storage.models.EncryptionService + :ivar queue: The encryption function of the queue storage service. 
+ :vartype queue: ~azure.mgmt.storage.models.EncryptionService + """ + + blob: Optional["_models.EncryptionService"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The encryption function of the blob storage service.""" + file: Optional["_models.EncryptionService"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The encryption function of the file storage service.""" + table: Optional["_models.EncryptionService"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The encryption function of the table storage service.""" + queue: Optional["_models.EncryptionService"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The encryption function of the queue storage service.""" + + @overload + def __init__( + self, + *, + blob: Optional["_models.EncryptionService"] = None, + file: Optional["_models.EncryptionService"] = None, + table: Optional["_models.EncryptionService"] = None, + queue: Optional["_models.EncryptionService"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Endpoints(_Model): + """The URIs that are used to perform a retrieval of a public blob, queue, table, web or dfs + object. + + :ivar blob: Gets the blob endpoint. + :vartype blob: str + :ivar queue: Gets the queue endpoint. + :vartype queue: str + :ivar table: Gets the table endpoint. + :vartype table: str + :ivar file: Gets the file endpoint. + :vartype file: str + :ivar web: Gets the web endpoint. + :vartype web: str + :ivar dfs: Gets the dfs endpoint. + :vartype dfs: str + :ivar microsoft_endpoints: Gets the microsoft routing storage endpoints. 
+ :vartype microsoft_endpoints: ~azure.mgmt.storage.models.StorageAccountMicrosoftEndpoints + :ivar internet_endpoints: Gets the internet routing storage endpoints. + :vartype internet_endpoints: ~azure.mgmt.storage.models.StorageAccountInternetEndpoints + :ivar ipv6_endpoints: Gets the IPv6 storage endpoints. + :vartype ipv6_endpoints: ~azure.mgmt.storage.models.StorageAccountIpv6Endpoints + """ + + blob: Optional[str] = rest_field(visibility=["read"]) + """Gets the blob endpoint.""" + queue: Optional[str] = rest_field(visibility=["read"]) + """Gets the queue endpoint.""" + table: Optional[str] = rest_field(visibility=["read"]) + """Gets the table endpoint.""" + file: Optional[str] = rest_field(visibility=["read"]) + """Gets the file endpoint.""" + web: Optional[str] = rest_field(visibility=["read"]) + """Gets the web endpoint.""" + dfs: Optional[str] = rest_field(visibility=["read"]) + """Gets the dfs endpoint.""" + microsoft_endpoints: Optional["_models.StorageAccountMicrosoftEndpoints"] = rest_field( + name="microsoftEndpoints", visibility=["read", "create", "update", "delete", "query"] + ) + """Gets the microsoft routing storage endpoints.""" + internet_endpoints: Optional["_models.StorageAccountInternetEndpoints"] = rest_field( + name="internetEndpoints", visibility=["read", "create", "update", "delete", "query"] + ) + """Gets the internet routing storage endpoints.""" + ipv6_endpoints: Optional["_models.StorageAccountIpv6Endpoints"] = rest_field( + name="ipv6Endpoints", visibility=["read", "create", "update", "delete", "query"] + ) + """Gets the IPv6 storage endpoints.""" + + @overload + def __init__( + self, + *, + microsoft_endpoints: Optional["_models.StorageAccountMicrosoftEndpoints"] = None, + internet_endpoints: Optional["_models.StorageAccountInternetEndpoints"] = None, + ipv6_endpoints: Optional["_models.StorageAccountIpv6Endpoints"] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorAdditionalInfo(_Model): + """The resource management error additional info. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: any + """ + + type: Optional[str] = rest_field(visibility=["read"]) + """The additional info type.""" + info: Optional[Any] = rest_field(visibility=["read"]) + """The additional info.""" + + +class ErrorDetail(_Model): + """The error detail. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.mgmt.storage.models.ErrorDetail] + :ivar additional_info: The error additional info. + :vartype additional_info: list[~azure.mgmt.storage.models.ErrorAdditionalInfo] + """ + + code: Optional[str] = rest_field(visibility=["read"]) + """The error code.""" + message: Optional[str] = rest_field(visibility=["read"]) + """The error message.""" + target: Optional[str] = rest_field(visibility=["read"]) + """The error target.""" + details: Optional[list["_models.ErrorDetail"]] = rest_field(visibility=["read"]) + """The error details.""" + additional_info: Optional[list["_models.ErrorAdditionalInfo"]] = rest_field( + name="additionalInfo", visibility=["read"] + ) + """The error additional info.""" + + +class ErrorResponse(_Model): + """An error response from the storage resource provider. + + :ivar error: Azure Storage Resource Provider error response body. 
+ :vartype error: ~azure.mgmt.storage.models.ErrorResponseBody + """ + + error: Optional["_models.ErrorResponseBody"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Azure Storage Resource Provider error response body.""" + + @overload + def __init__( + self, + *, + error: Optional["_models.ErrorResponseBody"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorResponseAutoGenerated(_Model): + """Error response. + + :ivar error: The error object. + :vartype error: ~azure.mgmt.storage.models.ErrorDetail + """ + + error: Optional["_models.ErrorDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error object.""" + + @overload + def __init__( + self, + *, + error: Optional["_models.ErrorDetail"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorResponseBody(_Model): + """Error response body contract. + + :ivar code: An identifier for the error. Codes are invariant and are intended to be consumed + programmatically. + :vartype code: str + :ivar message: A message describing the error, intended to be suitable for display in a user + interface. + :vartype message: str + """ + + code: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An identifier for the error. 
Codes are invariant and are intended to be consumed + programmatically.""" + message: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A message describing the error, intended to be suitable for display in a user interface.""" + + @overload + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ExecutionTarget(_Model): + """Target helps provide filter parameters for the objects in the storage account and forms the + execution context for the storage task. + + :ivar prefix: Required list of object prefixes to be included for task execution. + :vartype prefix: list[str] + :ivar exclude_prefix: List of object prefixes to be excluded from task execution. If there is a + conflict between include and exclude prefixes, the exclude prefix will be the determining + factor. + :vartype exclude_prefix: list[str] + """ + + prefix: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required list of object prefixes to be included for task execution.""" + exclude_prefix: Optional[list[str]] = rest_field( + name="excludePrefix", visibility=["read", "create", "update", "delete", "query"] + ) + """List of object prefixes to be excluded from task execution. If there is a conflict between + include and exclude prefixes, the exclude prefix will be the determining factor.""" + + @overload + def __init__( + self, + *, + prefix: Optional[list[str]] = None, + exclude_prefix: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ExecutionTrigger(_Model): + """Execution trigger for storage task assignment. + + :ivar type: The trigger type of the storage task assignment execution. Required. Known values + are: "RunOnce", "OnSchedule", and "MockRun". + :vartype type: str or ~azure.mgmt.storage.models.TriggerType + :ivar parameters: The trigger parameters of the storage task assignment execution. Required. + :vartype parameters: ~azure.mgmt.storage.models.TriggerParameters + """ + + type: Union[str, "_models.TriggerType"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The trigger type of the storage task assignment execution. Required. Known values are: + \"RunOnce\", \"OnSchedule\", and \"MockRun\".""" + parameters: "_models.TriggerParameters" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The trigger parameters of the storage task assignment execution. Required.""" + + @overload + def __init__( + self, + *, + type: Union[str, "_models.TriggerType"], + parameters: "_models.TriggerParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ExecutionTriggerUpdate(_Model): + """Execution trigger update for storage task assignment. + + :ivar type: The trigger type of the storage task assignment execution. Known values are: + "RunOnce", "OnSchedule", and "MockRun". + :vartype type: str or ~azure.mgmt.storage.models.TriggerType + :ivar parameters: The trigger parameters of the storage task assignment execution. 
+ :vartype parameters: ~azure.mgmt.storage.models.TriggerParametersUpdate + """ + + type: Optional[Union[str, "_models.TriggerType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The trigger type of the storage task assignment execution. Known values are: \"RunOnce\", + \"OnSchedule\", and \"MockRun\".""" + parameters: Optional["_models.TriggerParametersUpdate"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The trigger parameters of the storage task assignment execution.""" + + @overload + def __init__( + self, + *, + type: Optional[Union[str, "_models.TriggerType"]] = None, + parameters: Optional["_models.TriggerParametersUpdate"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ExtendedLocation(_Model): + """The complex type of the extended location. + + :ivar name: The name of the extended location. + :vartype name: str + :ivar type: The type of the extended location. "EdgeZone" + :vartype type: str or ~azure.mgmt.storage.models.ExtendedLocationTypes + """ + + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the extended location.""" + type: Optional[Union[str, "_models.ExtendedLocationTypes"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The type of the extended location. \"EdgeZone\"""" + + @overload + def __init__( + self, + *, + name: Optional[str] = None, + type: Optional[Union[str, "_models.ExtendedLocationTypes"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileServiceItems(_Model): + """FileServiceItems. + + :ivar value: List of file services returned. + :vartype value: list[~azure.mgmt.storage.models.FileServiceProperties] + """ + + value: Optional[list["_models.FileServiceProperties"]] = rest_field(visibility=["read"]) + """List of file services returned.""" + + +class FileServiceProperties(ProxyResource): + """The properties of File services in storage account. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar file_service_properties: The properties of File services in storage account. + :vartype file_service_properties: ~azure.mgmt.storage.models.FileServicePropertiesProperties + :ivar sku: Sku name and tier. + :vartype sku: ~azure.mgmt.storage.models.Sku + """ + + file_service_properties: Optional["_models.FileServicePropertiesProperties"] = rest_field( + name="properties", visibility=["read", "create", "update", "delete", "query"] + ) + """The properties of File services in storage account.""" + sku: Optional["_models.Sku"] = rest_field(visibility=["read"]) + """Sku name and tier.""" + + __flattened_items = ["cors", "share_delete_retention_policy", "protocol_settings"] + + @overload + def __init__( + self, + *, + file_service_properties: Optional["_models.FileServicePropertiesProperties"] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.file_service_properties is None: + return None + return getattr(self.file_service_properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.file_service_properties is None: + self.file_service_properties = self._attr_to_rest_field["file_service_properties"]._class_type() + setattr(self.file_service_properties, key, value) + else: + super().__setattr__(key, value) + + +class FileServicePropertiesProperties(_Model): + """The properties of File services in storage account. + + :ivar cors: Specifies CORS rules for the File service. You can include up to five CorsRule + elements in the request. If no CorsRule elements are included in the request body, all CORS + rules will be deleted, and CORS will be disabled for the File service. + :vartype cors: ~azure.mgmt.storage.models.CorsRules + :ivar share_delete_retention_policy: The file service properties for share soft delete. + :vartype share_delete_retention_policy: ~azure.mgmt.storage.models.DeleteRetentionPolicy + :ivar protocol_settings: Protocol settings for file service. + :vartype protocol_settings: ~azure.mgmt.storage.models.ProtocolSettings + """ + + cors: Optional["_models.CorsRules"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Specifies CORS rules for the File service. You can include up to five CorsRule elements in the + request. 
If no CorsRule elements are included in the request body, all CORS rules will be + deleted, and CORS will be disabled for the File service.""" + share_delete_retention_policy: Optional["_models.DeleteRetentionPolicy"] = rest_field( + name="shareDeleteRetentionPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """The file service properties for share soft delete.""" + protocol_settings: Optional["_models.ProtocolSettings"] = rest_field( + name="protocolSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Protocol settings for file service.""" + + @overload + def __init__( + self, + *, + cors: Optional["_models.CorsRules"] = None, + share_delete_retention_policy: Optional["_models.DeleteRetentionPolicy"] = None, + protocol_settings: Optional["_models.ProtocolSettings"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileServiceUsage(ProxyResource): + """The usage of file service in storage account. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar properties: File service usage in storage account including account limits, file share + limits and constants used in recommendations and bursting formula. 
+ :vartype properties: ~azure.mgmt.storage.models.FileServiceUsageProperties + """ + + properties: Optional["_models.FileServiceUsageProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """File service usage in storage account including account limits, file share limits and constants + used in recommendations and bursting formula.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.FileServiceUsageProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileServiceUsageProperties(_Model): + """File service usage in storage account including account limits, file share limits and constants + used in recommendations and bursting formula. + + :ivar storage_account_limits: Maximum provisioned storage, IOPS, bandwidth and number of file + shares limits for the storage account. + :vartype storage_account_limits: ~azure.mgmt.storage.models.AccountLimits + :ivar file_share_limits: Minimum and maximum provisioned storage, IOPS and bandwidth limits for + a file share in the storage account. + :vartype file_share_limits: ~azure.mgmt.storage.models.FileShareLimits + :ivar file_share_recommendations: Constants used for calculating recommended provisioned IOPS + and bandwidth for a file share in the storage account. + :vartype file_share_recommendations: ~azure.mgmt.storage.models.FileShareRecommendations + :ivar bursting_constants: Constants used for calculating included burst IOPS and maximum burst + credits for IOPS for a file share in the storage account. 
+ :vartype bursting_constants: ~azure.mgmt.storage.models.BurstingConstants + :ivar storage_account_usage: Usage of provisioned storage, IOPS, bandwidth and number of file + shares across all live shares and soft-deleted shares in the account. + :vartype storage_account_usage: ~azure.mgmt.storage.models.AccountUsage + """ + + storage_account_limits: Optional["_models.AccountLimits"] = rest_field( + name="storageAccountLimits", visibility=["read"] + ) + """Maximum provisioned storage, IOPS, bandwidth and number of file shares limits for the storage + account.""" + file_share_limits: Optional["_models.FileShareLimits"] = rest_field(name="fileShareLimits", visibility=["read"]) + """Minimum and maximum provisioned storage, IOPS and bandwidth limits for a file share in the + storage account.""" + file_share_recommendations: Optional["_models.FileShareRecommendations"] = rest_field( + name="fileShareRecommendations", visibility=["read"] + ) + """Constants used for calculating recommended provisioned IOPS and bandwidth for a file share in + the storage account.""" + bursting_constants: Optional["_models.BurstingConstants"] = rest_field( + name="burstingConstants", visibility=["read"] + ) + """Constants used for calculating included burst IOPS and maximum burst credits for IOPS for a + file share in the storage account.""" + storage_account_usage: Optional["_models.AccountUsage"] = rest_field( + name="storageAccountUsage", visibility=["read"] + ) + """Usage of provisioned storage, IOPS, bandwidth and number of file shares across all live shares + and soft-deleted shares in the account.""" + + +class FileShare(ProxyResource): + """Properties of the file share, including Id, resource name, resource type, Etag. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. 
+ :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar file_share_properties: Properties of the file share. + :vartype file_share_properties: ~azure.mgmt.storage.models.FileShareProperties + :ivar etag: Resource Etag. + :vartype etag: str + """ + + file_share_properties: Optional["_models.FileShareProperties"] = rest_field( + name="properties", visibility=["read", "create", "update", "delete", "query"] + ) + """Properties of the file share.""" + etag: Optional[str] = rest_field(visibility=["read"]) + """Resource Etag.""" + + __flattened_items = [ + "last_modified_time", + "metadata", + "share_quota", + "provisioned_iops", + "provisioned_bandwidth_mibps", + "included_burst_iops", + "max_burst_credits_for_iops", + "next_allowed_quota_downgrade_time", + "next_allowed_provisioned_iops_downgrade_time", + "next_allowed_provisioned_bandwidth_downgrade_time", + "enabled_protocols", + "root_squash", + "version", + "deleted", + "deleted_time", + "remaining_retention_days", + "access_tier", + "access_tier_change_time", + "access_tier_status", + "share_usage_bytes", + "lease_status", + "lease_state", + "lease_duration", + "signed_identifiers", + "snapshot_time", + "file_share_paid_bursting", + ] + + @overload + def __init__( + self, + *, + file_share_properties: Optional["_models.FileShareProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.file_share_properties is None: + return None + return getattr(self.file_share_properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.file_share_properties is None: + self.file_share_properties = self._attr_to_rest_field["file_share_properties"]._class_type() + setattr(self.file_share_properties, key, value) + else: + super().__setattr__(key, value) + + +class FileShareItem(AzureEntityResource): + """The file share properties be listed out. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar etag: Resource Etag. + :vartype etag: str + :ivar properties: The file share properties be listed out. 
+ :vartype properties: ~azure.mgmt.storage.models.FileShareProperties + """ + + properties: Optional["_models.FileShareProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The file share properties be listed out.""" + + __flattened_items = [ + "last_modified_time", + "metadata", + "share_quota", + "provisioned_iops", + "provisioned_bandwidth_mibps", + "included_burst_iops", + "max_burst_credits_for_iops", + "next_allowed_quota_downgrade_time", + "next_allowed_provisioned_iops_downgrade_time", + "next_allowed_provisioned_bandwidth_downgrade_time", + "enabled_protocols", + "root_squash", + "version", + "deleted", + "deleted_time", + "remaining_retention_days", + "access_tier", + "access_tier_change_time", + "access_tier_status", + "share_usage_bytes", + "lease_status", + "lease_state", + "lease_duration", + "signed_identifiers", + "snapshot_time", + "file_share_paid_bursting", + ] + + @overload + def __init__( + self, + *, + properties: Optional["_models.FileShareProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.properties is None: + return None + return getattr(self.properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.properties is None: + self.properties = self._attr_to_rest_field["properties"]._class_type() + setattr(self.properties, key, value) + else: + super().__setattr__(key, value) + + +class FileShareLimits(_Model): + """Minimum and maximum provisioned storage, IOPS and bandwidth limits for a file share in the + storage account. + + :ivar min_provisioned_storage_gi_b: The minimum provisioned storage quota limit in gibibytes + for a file share in the storage account. + :vartype min_provisioned_storage_gi_b: int + :ivar max_provisioned_storage_gi_b: The maximum provisioned storage quota limit in gibibytes + for a file share in the storage account. + :vartype max_provisioned_storage_gi_b: int + :ivar min_provisioned_iops: The minimum provisioned IOPS limit for a file share in the storage + account. + :vartype min_provisioned_iops: int + :ivar max_provisioned_iops: The maximum provisioned IOPS limit for a file share in the storage + account. + :vartype max_provisioned_iops: int + :ivar min_provisioned_bandwidth_mi_b_per_sec: The minimum provisioned bandwidth limit in + mebibytes per second for a file share in the storage account. + :vartype min_provisioned_bandwidth_mi_b_per_sec: int + :ivar max_provisioned_bandwidth_mi_b_per_sec: The maximum provisioned bandwidth limit in + mebibytes per second for a file share in the storage account. 
+ :vartype max_provisioned_bandwidth_mi_b_per_sec: int + """ + + min_provisioned_storage_gi_b: Optional[int] = rest_field(name="minProvisionedStorageGiB", visibility=["read"]) + """The minimum provisioned storage quota limit in gibibytes for a file share in the storage + account.""" + max_provisioned_storage_gi_b: Optional[int] = rest_field(name="maxProvisionedStorageGiB", visibility=["read"]) + """The maximum provisioned storage quota limit in gibibytes for a file share in the storage + account.""" + min_provisioned_iops: Optional[int] = rest_field(name="minProvisionedIOPS", visibility=["read"]) + """The minimum provisioned IOPS limit for a file share in the storage account.""" + max_provisioned_iops: Optional[int] = rest_field(name="maxProvisionedIOPS", visibility=["read"]) + """The maximum provisioned IOPS limit for a file share in the storage account.""" + min_provisioned_bandwidth_mi_b_per_sec: Optional[int] = rest_field( + name="minProvisionedBandwidthMiBPerSec", visibility=["read"] + ) + """The minimum provisioned bandwidth limit in mebibytes per second for a file share in the storage + account.""" + max_provisioned_bandwidth_mi_b_per_sec: Optional[int] = rest_field( + name="maxProvisionedBandwidthMiBPerSec", visibility=["read"] + ) + """The maximum provisioned bandwidth limit in mebibytes per second for a file share in the storage + account.""" + + +class FileShareProperties(_Model): + """The properties of the file share. + + :ivar last_modified_time: Returns the date and time the share was last modified. + :vartype last_modified_time: ~datetime.datetime + :ivar metadata: A name-value pair to associate with the share as metadata. + :vartype metadata: dict[str, str] + :ivar share_quota: The provisioned size of the share, in gibibytes. Must be greater than 0, and + less than or equal to 5TB (5120). For Large File Shares, the maximum size is 102400. 
For file + shares created under Files Provisioned v2 account type, please refer to the GetFileServiceUsage + API response for the minimum and maximum allowed provisioned storage size. + :vartype share_quota: int + :ivar provisioned_iops: The provisioned IOPS of the share. This property is only for file + shares created under Files Provisioned v2 account type. Please refer to the GetFileServiceUsage + API response for the minimum and maximum allowed value for provisioned IOPS. + :vartype provisioned_iops: int + :ivar provisioned_bandwidth_mibps: The provisioned bandwidth of the share, in mebibytes per + second. This property is only for file shares created under Files Provisioned v2 account type. + Please refer to the GetFileServiceUsage API response for the minimum and maximum allowed value + for provisioned bandwidth. + :vartype provisioned_bandwidth_mibps: int + :ivar included_burst_iops: The calculated burst IOPS of the share. This property is only for + file shares created under Files Provisioned v2 account type. + :vartype included_burst_iops: int + :ivar max_burst_credits_for_iops: The calculated maximum burst credits for the share. This + property is only for file shares created under Files Provisioned v2 account type. + :vartype max_burst_credits_for_iops: int + :ivar next_allowed_quota_downgrade_time: Returns the next allowed provisioned storage size + downgrade time for the share. This property is only for file shares created under Files + Provisioned v1 SSD and Files Provisioned v2 account type. + :vartype next_allowed_quota_downgrade_time: ~datetime.datetime + :ivar next_allowed_provisioned_iops_downgrade_time: Returns the next allowed provisioned IOPS + downgrade time for the share. This property is only for file shares created under Files + Provisioned v2 account type. 
+ :vartype next_allowed_provisioned_iops_downgrade_time: ~datetime.datetime + :ivar next_allowed_provisioned_bandwidth_downgrade_time: Returns the next allowed provisioned + bandwidth downgrade time for the share. This property is only for file shares created under + Files Provisioned v2 account type. + :vartype next_allowed_provisioned_bandwidth_downgrade_time: ~datetime.datetime + :ivar enabled_protocols: The authentication protocol that is used for the file share. Can only + be specified when creating a share. Known values are: "SMB" and "NFS". + :vartype enabled_protocols: str or ~azure.mgmt.storage.models.EnabledProtocols + :ivar root_squash: The property is for NFS share only. The default is NoRootSquash. Known + values are: "NoRootSquash", "RootSquash", and "AllSquash". + :vartype root_squash: str or ~azure.mgmt.storage.models.RootSquashType + :ivar version: The version of the share. + :vartype version: str + :ivar deleted: Indicates whether the share was deleted. + :vartype deleted: bool + :ivar deleted_time: The deleted time if the share was deleted. + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: Remaining retention days for share that was soft deleted. + :vartype remaining_retention_days: int + :ivar access_tier: Access tier for specific share. GpV2 account can choose between + TransactionOptimized (default), Hot, and Cool. FileStorage account can choose Premium. Known + values are: "TransactionOptimized", "Hot", "Cool", and "Premium". + :vartype access_tier: str or ~azure.mgmt.storage.models.ShareAccessTier + :ivar access_tier_change_time: Indicates the last modification time for share access tier. + :vartype access_tier_change_time: ~datetime.datetime + :ivar access_tier_status: Indicates if there is a pending transition for access tier. + :vartype access_tier_status: str + :ivar share_usage_bytes: The approximate size of the data stored on the share. 
Note that this + value may not include all recently created or recently resized files. + :vartype share_usage_bytes: int + :ivar lease_status: The lease status of the share. Known values are: "Locked" and "Unlocked". + :vartype lease_status: str or ~azure.mgmt.storage.models.LeaseStatus + :ivar lease_state: Lease state of the share. Known values are: "Available", "Leased", + "Expired", "Breaking", and "Broken". + :vartype lease_state: str or ~azure.mgmt.storage.models.LeaseState + :ivar lease_duration: Specifies whether the lease on a share is of infinite or fixed duration, + only when the share is leased. Known values are: "Infinite" and "Fixed". + :vartype lease_duration: str or ~azure.mgmt.storage.models.LeaseDuration + :ivar signed_identifiers: List of stored access policies specified on the share. + :vartype signed_identifiers: list[~azure.mgmt.storage.models.SignedIdentifier] + :ivar snapshot_time: Creation time of share snapshot returned in the response of list shares + with expand param "snapshots". + :vartype snapshot_time: ~datetime.datetime + :ivar file_share_paid_bursting: File Share Paid Bursting properties. + :vartype file_share_paid_bursting: + ~azure.mgmt.storage.models.FileSharePropertiesFileSharePaidBursting + """ + + last_modified_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedTime", visibility=["read"], format="rfc3339" + ) + """Returns the date and time the share was last modified.""" + metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A name-value pair to associate with the share as metadata.""" + share_quota: Optional[int] = rest_field( + name="shareQuota", visibility=["read", "create", "update", "delete", "query"] + ) + """The provisioned size of the share, in gibibytes. Must be greater than 0, and less than or equal + to 5TB (5120). For Large File Shares, the maximum size is 102400. 
For file shares created under + Files Provisioned v2 account type, please refer to the GetFileServiceUsage API response for the + minimum and maximum allowed provisioned storage size.""" + provisioned_iops: Optional[int] = rest_field( + name="provisionedIops", visibility=["read", "create", "update", "delete", "query"] + ) + """The provisioned IOPS of the share. This property is only for file shares created under Files + Provisioned v2 account type. Please refer to the GetFileServiceUsage API response for the + minimum and maximum allowed value for provisioned IOPS.""" + provisioned_bandwidth_mibps: Optional[int] = rest_field( + name="provisionedBandwidthMibps", visibility=["read", "create", "update", "delete", "query"] + ) + """The provisioned bandwidth of the share, in mebibytes per second. This property is only for file + shares created under Files Provisioned v2 account type. Please refer to the GetFileServiceUsage + API response for the minimum and maximum allowed value for provisioned bandwidth.""" + included_burst_iops: Optional[int] = rest_field(name="includedBurstIops", visibility=["read"]) + """The calculated burst IOPS of the share. This property is only for file shares created under + Files Provisioned v2 account type.""" + max_burst_credits_for_iops: Optional[int] = rest_field(name="maxBurstCreditsForIops", visibility=["read"]) + """The calculated maximum burst credits for the share. This property is only for file shares + created under Files Provisioned v2 account type.""" + next_allowed_quota_downgrade_time: Optional[datetime.datetime] = rest_field( + name="nextAllowedQuotaDowngradeTime", visibility=["read"], format="rfc7231" + ) + """Returns the next allowed provisioned storage size downgrade time for the share. 
This property + is only for file shares created under Files Provisioned v1 SSD and Files Provisioned v2 account + type.""" + next_allowed_provisioned_iops_downgrade_time: Optional[datetime.datetime] = rest_field( + name="nextAllowedProvisionedIopsDowngradeTime", visibility=["read"], format="rfc7231" + ) + """Returns the next allowed provisioned IOPS downgrade time for the share. This property is only + for file shares created under Files Provisioned v2 account type.""" + next_allowed_provisioned_bandwidth_downgrade_time: Optional[datetime.datetime] = rest_field( + name="nextAllowedProvisionedBandwidthDowngradeTime", visibility=["read"], format="rfc7231" + ) + """Returns the next allowed provisioned bandwidth downgrade time for the share. This property is + only for file shares created under Files Provisioned v2 account type.""" + enabled_protocols: Optional[Union[str, "_models.EnabledProtocols"]] = rest_field( + name="enabledProtocols", visibility=["read", "create"] + ) + """The authentication protocol that is used for the file share. Can only be specified when + creating a share. Known values are: \"SMB\" and \"NFS\".""" + root_squash: Optional[Union[str, "_models.RootSquashType"]] = rest_field( + name="rootSquash", visibility=["read", "create", "update", "delete", "query"] + ) + """The property is for NFS share only. The default is NoRootSquash. 
Known values are: + \"NoRootSquash\", \"RootSquash\", and \"AllSquash\".""" + version: Optional[str] = rest_field(visibility=["read"]) + """The version of the share.""" + deleted: Optional[bool] = rest_field(visibility=["read"]) + """Indicates whether the share was deleted.""" + deleted_time: Optional[datetime.datetime] = rest_field(name="deletedTime", visibility=["read"], format="rfc3339") + """The deleted time if the share was deleted.""" + remaining_retention_days: Optional[int] = rest_field(name="remainingRetentionDays", visibility=["read"]) + """Remaining retention days for share that was soft deleted.""" + access_tier: Optional[Union[str, "_models.ShareAccessTier"]] = rest_field( + name="accessTier", visibility=["read", "create", "update", "delete", "query"] + ) + """Access tier for specific share. GpV2 account can choose between TransactionOptimized (default), + Hot, and Cool. FileStorage account can choose Premium. Known values are: + \"TransactionOptimized\", \"Hot\", \"Cool\", and \"Premium\".""" + access_tier_change_time: Optional[datetime.datetime] = rest_field( + name="accessTierChangeTime", visibility=["read"], format="rfc3339" + ) + """Indicates the last modification time for share access tier.""" + access_tier_status: Optional[str] = rest_field(name="accessTierStatus", visibility=["read"]) + """Indicates if there is a pending transition for access tier.""" + share_usage_bytes: Optional[int] = rest_field(name="shareUsageBytes", visibility=["read"]) + """The approximate size of the data stored on the share. Note that this value may not include all + recently created or recently resized files.""" + lease_status: Optional[Union[str, "_models.LeaseStatus"]] = rest_field(name="leaseStatus", visibility=["read"]) + """The lease status of the share. Known values are: \"Locked\" and \"Unlocked\".""" + lease_state: Optional[Union[str, "_models.LeaseState"]] = rest_field(name="leaseState", visibility=["read"]) + """Lease state of the share. 
Known values are: \"Available\", \"Leased\", \"Expired\", + \"Breaking\", and \"Broken\".""" + lease_duration: Optional[Union[str, "_models.LeaseDuration"]] = rest_field( + name="leaseDuration", visibility=["read"] + ) + """Specifies whether the lease on a share is of infinite or fixed duration, only when the share is + leased. Known values are: \"Infinite\" and \"Fixed\".""" + signed_identifiers: Optional[list["_models.SignedIdentifier"]] = rest_field( + name="signedIdentifiers", visibility=["read", "create", "update", "delete", "query"] + ) + """List of stored access policies specified on the share.""" + snapshot_time: Optional[datetime.datetime] = rest_field(name="snapshotTime", visibility=["read"], format="rfc3339") + """Creation time of share snapshot returned in the response of list shares with expand param + \"snapshots\".""" + file_share_paid_bursting: Optional["_models.FileSharePropertiesFileSharePaidBursting"] = rest_field( + name="fileSharePaidBursting", visibility=["read", "create", "update", "delete", "query"] + ) + """File Share Paid Bursting properties.""" + + @overload + def __init__( # pylint: disable=too-many-locals + self, + *, + metadata: Optional[dict[str, str]] = None, + share_quota: Optional[int] = None, + provisioned_iops: Optional[int] = None, + provisioned_bandwidth_mibps: Optional[int] = None, + enabled_protocols: Optional[Union[str, "_models.EnabledProtocols"]] = None, + root_squash: Optional[Union[str, "_models.RootSquashType"]] = None, + access_tier: Optional[Union[str, "_models.ShareAccessTier"]] = None, + signed_identifiers: Optional[list["_models.SignedIdentifier"]] = None, + file_share_paid_bursting: Optional["_models.FileSharePropertiesFileSharePaidBursting"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileSharePropertiesFileSharePaidBursting(_Model): + """File Share Paid Bursting properties. + + :ivar paid_bursting_enabled: Indicates whether paid bursting is enabled for the share. This + property is only for file shares created under Files Provisioned v1 SSD account type. + :vartype paid_bursting_enabled: bool + :ivar paid_bursting_max_iops: The maximum paid bursting IOPS for the share. This property is + only for file shares created under Files Provisioned v1 SSD account type. The maximum allowed + value is 102400 which is the maximum allowed IOPS for a share. + :vartype paid_bursting_max_iops: int + :ivar paid_bursting_max_bandwidth_mibps: The maximum paid bursting bandwidth for the share, in + mebibytes per second. This property is only for file shares created under Files Provisioned v1 + SSD account type. The maximum allowed value is 10340 which is the maximum allowed bandwidth for + a share. + :vartype paid_bursting_max_bandwidth_mibps: int + """ + + paid_bursting_enabled: Optional[bool] = rest_field( + name="paidBurstingEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether paid bursting is enabled for the share. This property is only for file shares + created under Files Provisioned v1 SSD account type.""" + paid_bursting_max_iops: Optional[int] = rest_field( + name="paidBurstingMaxIops", visibility=["read", "create", "update", "delete", "query"] + ) + """The maximum paid bursting IOPS for the share. This property is only for file shares created + under Files Provisioned v1 SSD account type. 
The maximum allowed value is 102400 which is the + maximum allowed IOPS for a share.""" + paid_bursting_max_bandwidth_mibps: Optional[int] = rest_field( + name="paidBurstingMaxBandwidthMibps", visibility=["read", "create", "update", "delete", "query"] + ) + """The maximum paid bursting bandwidth for the share, in mebibytes per second. This property is + only for file shares created under Files Provisioned v1 SSD account type. The maximum allowed + value is 10340 which is the maximum allowed bandwidth for a share.""" + + @overload + def __init__( + self, + *, + paid_bursting_enabled: Optional[bool] = None, + paid_bursting_max_iops: Optional[int] = None, + paid_bursting_max_bandwidth_mibps: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareRecommendations(_Model): + """Constants used for calculating recommended provisioned IOPS and bandwidth for a file share in + the storage account. + + :ivar base_iops: The base IOPS in the file share provisioned IOPS recommendation formula. + :vartype base_iops: int + :ivar io_scalar: The scalar for IO in the file share provisioned IOPS recommendation formula. + :vartype io_scalar: float + :ivar base_bandwidth_mi_b_per_sec: The base bandwidth in the file share provisioned bandwidth + recommendation formula. + :vartype base_bandwidth_mi_b_per_sec: int + :ivar bandwidth_scalar: The scalar for bandwidth in the file share provisioned bandwidth + recommendation formula. 
+ :vartype bandwidth_scalar: float + """ + + base_iops: Optional[int] = rest_field(name="baseIOPS", visibility=["read"]) + """The base IOPS in the file share provisioned IOPS recommendation formula.""" + io_scalar: Optional[float] = rest_field(name="ioScalar", visibility=["read"]) + """The scalar for IO in the file share provisioned IOPS recommendation formula.""" + base_bandwidth_mi_b_per_sec: Optional[int] = rest_field(name="baseBandwidthMiBPerSec", visibility=["read"]) + """The base bandwidth in the file share provisioned bandwidth recommendation formula.""" + bandwidth_scalar: Optional[float] = rest_field(name="bandwidthScalar", visibility=["read"]) + """The scalar for bandwidth in the file share provisioned bandwidth recommendation formula.""" + + +class GeoPriorityReplicationStatus(_Model): + """Geo Priority Replication enablement status for the storage account. + + :ivar is_blob_enabled: Indicates whether Blob Geo Priority Replication is enabled for the + storage account. + :vartype is_blob_enabled: bool + """ + + is_blob_enabled: Optional[bool] = rest_field( + name="isBlobEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether Blob Geo Priority Replication is enabled for the storage account.""" + + @overload + def __init__( + self, + *, + is_blob_enabled: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class GeoReplicationStats(_Model): + """Statistics related to replication for storage account's Blob, Table, Queue and File services. + It is only available when geo-redundant replication is enabled for the storage account. + + :ivar status: The status of the secondary location. 
+ Possible values are: - Live: Indicates that
+ the secondary location is active and operational. - Bootstrap: Indicates initial
+ synchronization from the primary location to the secondary location is in progress. This
+ typically occurs when replication is first enabled. - Unavailable: Indicates that the secondary
+ location is temporarily unavailable. Known values are: "Live", "Bootstrap", and "Unavailable".
+ :vartype status: str or ~azure.mgmt.storage.models.GeoReplicationStatus
+ :ivar last_sync_time: All primary writes preceding this UTC date/time value are guaranteed to
+ be available for read operations. Primary writes following this point in time may or may not be
+ available for reads. Element may be default value if value of LastSyncTime is not available,
+ this can happen if secondary is offline or we are in bootstrap.
+ :vartype last_sync_time: ~datetime.datetime
+ :ivar can_failover: A boolean flag which indicates whether or not account failover is supported
+ for the account.
+ :vartype can_failover: bool
+ :ivar can_planned_failover: A boolean flag which indicates whether or not planned account
+ failover is supported for the account.
+ :vartype can_planned_failover: bool
+ :ivar post_failover_redundancy: The redundancy type of the account after an account failover is
+ performed. Known values are: "Standard_LRS" and "Standard_ZRS".
+ :vartype post_failover_redundancy: str or ~azure.mgmt.storage.models.PostFailoverRedundancy
+ :ivar post_planned_failover_redundancy: The redundancy type of the account after a planned
+ account failover is performed. Known values are: "Standard_GRS", "Standard_GZRS",
+ "Standard_RAGRS", and "Standard_RAGZRS".
+ :vartype post_planned_failover_redundancy: str or
+ ~azure.mgmt.storage.models.PostPlannedFailoverRedundancy
+ """
+
+ status: Optional[Union[str, "_models.GeoReplicationStatus"]] = rest_field(visibility=["read"])
+ """The status of the secondary location. 
+ Possible values are: - Live: Indicates that the secondary
+ location is active and operational. - Bootstrap: Indicates initial synchronization from the
+ primary location to the secondary location is in progress. This typically occurs when
+ replication is first enabled. - Unavailable: Indicates that the secondary location is
+ temporarily unavailable. Known values are: \"Live\", \"Bootstrap\", and \"Unavailable\"."""
+ last_sync_time: Optional[datetime.datetime] = rest_field(name="lastSyncTime", visibility=["read"], format="rfc3339")
+ """All primary writes preceding this UTC date/time value are guaranteed to be available for read
+ operations. Primary writes following this point in time may or may not be available for reads.
+ Element may be default value if value of LastSyncTime is not available, this can happen if
+ secondary is offline or we are in bootstrap."""
+ can_failover: Optional[bool] = rest_field(name="canFailover", visibility=["read"])
+ """A boolean flag which indicates whether or not account failover is supported for the account."""
+ can_planned_failover: Optional[bool] = rest_field(name="canPlannedFailover", visibility=["read"])
+ """A boolean flag which indicates whether or not planned account failover is supported for the
+ account."""
+ post_failover_redundancy: Optional[Union[str, "_models.PostFailoverRedundancy"]] = rest_field(
+ name="postFailoverRedundancy", visibility=["read"]
+ )
+ """The redundancy type of the account after an account failover is performed. Known values are:
+ \"Standard_LRS\" and \"Standard_ZRS\"."""
+ post_planned_failover_redundancy: Optional[Union[str, "_models.PostPlannedFailoverRedundancy"]] = rest_field(
+ name="postPlannedFailoverRedundancy", visibility=["read"]
+ )
+ """The redundancy type of the account after a planned account failover is performed. 
Known values + are: \"Standard_GRS\", \"Standard_GZRS\", \"Standard_RAGRS\", and \"Standard_RAGZRS\".""" + + +class Identity(_Model): + """Identity for the resource. + + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :ivar type: The identity type. Required. Known values are: "None", "SystemAssigned", + "UserAssigned", and "SystemAssigned,UserAssigned". + :vartype type: str or ~azure.mgmt.storage.models.IdentityType + :ivar user_assigned_identities: Gets or sets a list of key value pairs that describe the set of + User Assigned identities that will be used with this storage account. The key is the ARM + resource identifier of the identity. Only 1 User Assigned identity is permitted here. + :vartype user_assigned_identities: dict[str, ~azure.mgmt.storage.models.UserAssignedIdentity] + """ + + principal_id: Optional[str] = rest_field(name="principalId", visibility=["read"]) + """The principal ID of resource identity.""" + tenant_id: Optional[str] = rest_field(name="tenantId", visibility=["read"]) + """The tenant ID of resource.""" + type: Union[str, "_models.IdentityType"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The identity type. Required. Known values are: \"None\", \"SystemAssigned\", \"UserAssigned\", + and \"SystemAssigned,UserAssigned\".""" + user_assigned_identities: Optional[dict[str, "_models.UserAssignedIdentity"]] = rest_field( + name="userAssignedIdentities", visibility=["read", "create", "update", "delete", "query"] + ) + """Gets or sets a list of key value pairs that describe the set of User Assigned identities that + will be used with this storage account. The key is the ARM resource identifier of the identity. 
+ Only 1 User Assigned identity is permitted here.""" + + @overload + def __init__( + self, + *, + type: Union[str, "_models.IdentityType"], + user_assigned_identities: Optional[dict[str, "_models.UserAssignedIdentity"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ImmutabilityPolicy(ProxyResource): + """The ImmutabilityPolicy property of a blob container, including Id, resource name, resource + type, Etag. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar properties: The properties of an ImmutabilityPolicy of a blob container. Required. + :vartype properties: ~azure.mgmt.storage.models.ImmutabilityPolicyProperty + :ivar etag: Resource Etag. + :vartype etag: str + """ + + properties: "_models.ImmutabilityPolicyProperty" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The properties of an ImmutabilityPolicy of a blob container. 
Required.""" + etag: Optional[str] = rest_field(visibility=["read"]) + """Resource Etag.""" + + __flattened_items = [ + "immutability_period_since_creation_in_days", + "state", + "allow_protected_append_writes", + "allow_protected_append_writes_all", + ] + + @overload + def __init__( + self, + *, + properties: "_models.ImmutabilityPolicyProperty", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.properties is None: + return None + return getattr(self.properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.properties is None: + self.properties = self._attr_to_rest_field["properties"]._class_type() + setattr(self.properties, key, value) + else: + super().__setattr__(key, value) + + +class ImmutabilityPolicyProperties(_Model): + """The properties of an ImmutabilityPolicy of a blob container. + + :ivar properties: The properties of an ImmutabilityPolicy of a blob container. + :vartype properties: ~azure.mgmt.storage.models.ImmutabilityPolicyProperty + :ivar etag: ImmutabilityPolicy Etag. + :vartype etag: str + :ivar update_history: The ImmutabilityPolicy update history of the blob container. 
+ :vartype update_history: list[~azure.mgmt.storage.models.UpdateHistoryProperty] + """ + + properties: Optional["_models.ImmutabilityPolicyProperty"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The properties of an ImmutabilityPolicy of a blob container.""" + etag: Optional[str] = rest_field(visibility=["read"]) + """ImmutabilityPolicy Etag.""" + update_history: Optional[list["_models.UpdateHistoryProperty"]] = rest_field( + name="updateHistory", visibility=["read"] + ) + """The ImmutabilityPolicy update history of the blob container.""" + + __flattened_items = [ + "immutability_period_since_creation_in_days", + "state", + "allow_protected_append_writes", + "allow_protected_append_writes_all", + ] + + @overload + def __init__( + self, + *, + properties: Optional["_models.ImmutabilityPolicyProperty"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.properties is None: + return None + return getattr(self.properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.properties is None: + self.properties = self._attr_to_rest_field["properties"]._class_type() + setattr(self.properties, key, value) + else: + super().__setattr__(key, value) + + +class ImmutabilityPolicyProperty(_Model): + """The properties of an ImmutabilityPolicy of a blob container. 
+
+ :ivar immutability_period_since_creation_in_days: The immutability period for the blobs in the
+ container since the policy creation, in days.
+ :vartype immutability_period_since_creation_in_days: int
+ :ivar state: The ImmutabilityPolicy state of a blob container, possible values include: Locked
+ and Unlocked. Known values are: "Locked" and "Unlocked".
+ :vartype state: str or ~azure.mgmt.storage.models.ImmutabilityPolicyState
+ :ivar allow_protected_append_writes: This property can only be changed for unlocked time-based
+ retention policies. When enabled, new blocks can be written to an append blob while maintaining
+ immutability protection and compliance. Only new blocks can be added and any existing blocks
+ cannot be modified or deleted. This property cannot be changed with ExtendImmutabilityPolicy
+ API.
+ :vartype allow_protected_append_writes: bool
+ :ivar allow_protected_append_writes_all: This property can only be changed for unlocked
+ time-based retention policies. When enabled, new blocks can be written to both 'Append and Block
+ Blobs' while maintaining immutability protection and compliance. Only new blocks can be added
+ and any existing blocks cannot be modified or deleted. This property cannot be changed with
+ ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and
+ 'allowProtectedAppendWritesAll' properties are mutually exclusive.
+ :vartype allow_protected_append_writes_all: bool
+ """
+
+ immutability_period_since_creation_in_days: Optional[int] = rest_field(
+ name="immutabilityPeriodSinceCreationInDays", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The immutability period for the blobs in the container since the policy creation, in days."""
+ state: Optional[Union[str, "_models.ImmutabilityPolicyState"]] = rest_field(visibility=["read"])
+ """The ImmutabilityPolicy state of a blob container, possible values include: Locked and Unlocked. 
+ Known values are: \"Locked\" and \"Unlocked\".""" + allow_protected_append_writes: Optional[bool] = rest_field( + name="allowProtectedAppendWrites", visibility=["read", "create", "update", "delete", "query"] + ) + """This property can only be changed for unlocked time-based retention policies. When enabled, new + blocks can be written to an append blob while maintaining immutability protection and + compliance. Only new blocks can be added and any existing blocks cannot be modified or deleted. + This property cannot be changed with ExtendImmutabilityPolicy API.""" + allow_protected_append_writes_all: Optional[bool] = rest_field( + name="allowProtectedAppendWritesAll", visibility=["read", "create", "update", "delete", "query"] + ) + """This property can only be changed for unlocked time-based retention policies. When enabled, new + blocks can be written to both 'Append and Block Blobs' while maintaining immutability protection + and compliance. Only new blocks can be added and any existing blocks cannot be modified or + deleted. This property cannot be changed with ExtendImmutabilityPolicy API. The + 'allowProtectedAppendWrites' and 'allowProtectedAppendWritesAll' properties are mutually + exclusive.""" + + @overload + def __init__( + self, + *, + immutability_period_since_creation_in_days: Optional[int] = None, + allow_protected_append_writes: Optional[bool] = None, + allow_protected_append_writes_all: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ImmutableStorageAccount(_Model): + """This property enables and defines account-level immutability. Enabling the feature auto-enables + Blob Versioning. + + :ivar enabled: A boolean flag which enables account-level immutability. 
All the containers + under such an account have object-level immutability enabled by default. + :vartype enabled: bool + :ivar immutability_policy: Specifies the default account-level immutability policy which is + inherited and applied to objects that do not possess an explicit immutability policy at the + object level. The object-level immutability policy has higher precedence than the + container-level immutability policy, which has a higher precedence than the account-level + immutability policy. + :vartype immutability_policy: ~azure.mgmt.storage.models.AccountImmutabilityPolicyProperties + """ + + enabled: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A boolean flag which enables account-level immutability. All the containers under such an + account have object-level immutability enabled by default.""" + immutability_policy: Optional["_models.AccountImmutabilityPolicyProperties"] = rest_field( + name="immutabilityPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the default account-level immutability policy which is inherited and applied to + objects that do not possess an explicit immutability policy at the object level. The + object-level immutability policy has higher precedence than the container-level immutability + policy, which has a higher precedence than the account-level immutability policy.""" + + @overload + def __init__( + self, + *, + enabled: Optional[bool] = None, + immutability_policy: Optional["_models.AccountImmutabilityPolicyProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ImmutableStorageWithVersioning(_Model): + """Object level immutability properties of the container. 
+ + :ivar enabled: This is an immutable property, when set to true it enables object level + immutability at the container level. + :vartype enabled: bool + :ivar time_stamp: Returns the date and time the object level immutability was enabled. + :vartype time_stamp: ~datetime.datetime + :ivar migration_state: This property denotes the container level immutability to object level + immutability migration state. Known values are: "InProgress" and "Completed". + :vartype migration_state: str or ~azure.mgmt.storage.models.MigrationState + """ + + enabled: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """This is an immutable property, when set to true it enables object level immutability at the + container level.""" + time_stamp: Optional[datetime.datetime] = rest_field(name="timeStamp", visibility=["read"], format="rfc3339") + """Returns the date and time the object level immutability was enabled.""" + migration_state: Optional[Union[str, "_models.MigrationState"]] = rest_field( + name="migrationState", visibility=["read"] + ) + """This property denotes the container level immutability to object level immutability migration + state. Known values are: \"InProgress\" and \"Completed\".""" + + @overload + def __init__( + self, + *, + enabled: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class IPRule(_Model): + """IP rule with specific IP or IP range in CIDR format. + + :ivar ip_address_or_range: Specifies the IP or IP range in CIDR format. Required. + :vartype ip_address_or_range: str + :ivar action: The action of IP ACL rule. Default value is "Allow". 
+ :vartype action: str + """ + + ip_address_or_range: str = rest_field(name="value", visibility=["read", "create", "update", "delete", "query"]) + """Specifies the IP or IP range in CIDR format. Required.""" + action: Optional[Literal["Allow"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The action of IP ACL rule. Default value is \"Allow\".""" + + @overload + def __init__( + self, + *, + ip_address_or_range: str, + action: Optional[Literal["Allow"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class KeyCreationTime(_Model): + """Storage account keys creation time. + + :ivar key1: + :vartype key1: ~datetime.datetime + :ivar key2: + :vartype key2: ~datetime.datetime + """ + + key1: Optional[datetime.datetime] = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + key2: Optional[datetime.datetime] = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + + @overload + def __init__( + self, + *, + key1: Optional[datetime.datetime] = None, + key2: Optional[datetime.datetime] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class KeyPolicy(_Model): + """KeyPolicy assigned to the storage account. + + :ivar key_expiration_period_in_days: The key expiration period in days. Required. 
+ :vartype key_expiration_period_in_days: int + """ + + key_expiration_period_in_days: int = rest_field( + name="keyExpirationPeriodInDays", visibility=["read", "create", "update", "delete", "query"] + ) + """The key expiration period in days. Required.""" + + @overload + def __init__( + self, + *, + key_expiration_period_in_days: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class KeyVaultProperties(_Model): + """Properties of key vault. + + :ivar key_name: The name of KeyVault key. + :vartype key_name: str + :ivar key_version: The version of KeyVault key. + :vartype key_version: str + :ivar key_vault_uri: The Uri of KeyVault. + :vartype key_vault_uri: str + :ivar current_versioned_key_identifier: The object identifier of the current versioned Key + Vault Key in use. + :vartype current_versioned_key_identifier: str + :ivar last_key_rotation_timestamp: Timestamp of last rotation of the Key Vault Key. + :vartype last_key_rotation_timestamp: ~datetime.datetime + :ivar current_versioned_key_expiration_timestamp: This is a read only property that represents + the expiration time of the current version of the customer managed key used for encryption. 
+ :vartype current_versioned_key_expiration_timestamp: ~datetime.datetime + """ + + key_name: Optional[str] = rest_field(name="keyname", visibility=["read", "create", "update", "delete", "query"]) + """The name of KeyVault key.""" + key_version: Optional[str] = rest_field( + name="keyversion", visibility=["read", "create", "update", "delete", "query"] + ) + """The version of KeyVault key.""" + key_vault_uri: Optional[str] = rest_field( + name="keyvaulturi", visibility=["read", "create", "update", "delete", "query"] + ) + """The Uri of KeyVault.""" + current_versioned_key_identifier: Optional[str] = rest_field( + name="currentVersionedKeyIdentifier", visibility=["read"] + ) + """The object identifier of the current versioned Key Vault Key in use.""" + last_key_rotation_timestamp: Optional[datetime.datetime] = rest_field( + name="lastKeyRotationTimestamp", visibility=["read"], format="rfc3339" + ) + """Timestamp of last rotation of the Key Vault Key.""" + current_versioned_key_expiration_timestamp: Optional[datetime.datetime] = rest_field( + name="currentVersionedKeyExpirationTimestamp", visibility=["read"], format="rfc3339" + ) + """This is a read only property that represents the expiration time of the current version of the + customer managed key used for encryption.""" + + @overload + def __init__( + self, + *, + key_name: Optional[str] = None, + key_version: Optional[str] = None, + key_vault_uri: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class LastAccessTimeTrackingPolicy(_Model): + """The blob service properties for Last access time based tracking policy. + + :ivar enable: When set to true last access time based tracking is enabled. Required. 
+ :vartype enable: bool + :ivar name: Name of the policy. The valid value is AccessTimeTracking. This field is currently + read only. "AccessTimeTracking" + :vartype name: str or ~azure.mgmt.storage.models.Name + :ivar tracking_granularity_in_days: The field specifies blob object tracking granularity in + days, typically how often the blob object should be tracked. This field is currently read only + with value as 1. + :vartype tracking_granularity_in_days: int + :ivar blob_type: An array of predefined supported blob types. Only blockBlob is the supported + value. This field is currently read only. + :vartype blob_type: list[str] + """ + + enable: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """When set to true last access time based tracking is enabled. Required.""" + name: Optional[Union[str, "_models.Name"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the policy. The valid value is AccessTimeTracking. This field is currently read only. + \"AccessTimeTracking\"""" + tracking_granularity_in_days: Optional[int] = rest_field( + name="trackingGranularityInDays", visibility=["read", "create", "update", "delete", "query"] + ) + """The field specifies blob object tracking granularity in days, typically how often the blob + object should be tracked. This field is currently read only with value as 1.""" + blob_type: Optional[list[str]] = rest_field( + name="blobType", visibility=["read", "create", "update", "delete", "query"] + ) + """An array of predefined supported blob types. Only blockBlob is the supported value. This field + is currently read only.""" + + @overload + def __init__( + self, + *, + enable: bool, + name: Optional[Union[str, "_models.Name"]] = None, + tracking_granularity_in_days: Optional[int] = None, + blob_type: Optional[list[str]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class LeaseContainerRequest(_Model): + """Lease Container request schema. + + :ivar action: Specifies the lease action. Can be one of the available actions. Required. Known + values are: "Acquire", "Renew", "Change", "Release", and "Break". + :vartype action: str or ~azure.mgmt.storage.models.LeaseContainerRequestAction + :ivar lease_id: Identifies the lease. Can be specified in any valid GUID string format. + :vartype lease_id: str + :ivar break_period: Optional. For a break action, proposed duration the lease should continue + before it is broken, in seconds, between 0 and 60. + :vartype break_period: int + :ivar lease_duration: Required for acquire. Specifies the duration of the lease, in seconds, or + negative one (-1) for a lease that never expires. + :vartype lease_duration: int + :ivar proposed_lease_id: Optional for acquire, required for change. Proposed lease ID, in a + GUID string format. + :vartype proposed_lease_id: str + """ + + action: Union[str, "_models.LeaseContainerRequestAction"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the lease action. Can be one of the available actions. Required. Known values are: + \"Acquire\", \"Renew\", \"Change\", \"Release\", and \"Break\".""" + lease_id: Optional[str] = rest_field(name="leaseId", visibility=["read", "create", "update", "delete", "query"]) + """Identifies the lease. Can be specified in any valid GUID string format.""" + break_period: Optional[int] = rest_field( + name="breakPeriod", visibility=["read", "create", "update", "delete", "query"] + ) + """Optional. 
For a break action, proposed duration the lease should continue before it is broken, + in seconds, between 0 and 60.""" + lease_duration: Optional[int] = rest_field( + name="leaseDuration", visibility=["read", "create", "update", "delete", "query"] + ) + """Required for acquire. Specifies the duration of the lease, in seconds, or negative one (-1) for + a lease that never expires.""" + proposed_lease_id: Optional[str] = rest_field( + name="proposedLeaseId", visibility=["read", "create", "update", "delete", "query"] + ) + """Optional for acquire, required for change. Proposed lease ID, in a GUID string format.""" + + @overload + def __init__( + self, + *, + action: Union[str, "_models.LeaseContainerRequestAction"], + lease_id: Optional[str] = None, + break_period: Optional[int] = None, + lease_duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class LeaseContainerResponse(_Model): + """Lease Container response schema. + + :ivar lease_id: Returned unique lease ID that must be included with any request to delete the + container, or to renew, change, or release the lease. + :vartype lease_id: str + :ivar lease_time_seconds: Approximate time remaining in the lease period, in seconds. 
+ :vartype lease_time_seconds: str + """ + + lease_id: Optional[str] = rest_field(name="leaseId", visibility=["read", "create", "update", "delete", "query"]) + """Returned unique lease ID that must be included with any request to delete the container, or to + renew, change, or release the lease.""" + lease_time_seconds: Optional[str] = rest_field( + name="leaseTimeSeconds", visibility=["read", "create", "update", "delete", "query"] + ) + """Approximate time remaining in the lease period, in seconds.""" + + @overload + def __init__( + self, + *, + lease_id: Optional[str] = None, + lease_time_seconds: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class LeaseShareRequest(_Model): + """Lease Share request schema. + + :ivar action: Specifies the lease action. Can be one of the available actions. Required. Known + values are: "Acquire", "Renew", "Change", "Release", and "Break". + :vartype action: str or ~azure.mgmt.storage.models.LeaseShareAction + :ivar lease_id: Identifies the lease. Can be specified in any valid GUID string format. + :vartype lease_id: str + :ivar break_period: Optional. For a break action, proposed duration the lease should continue + before it is broken, in seconds, between 0 and 60. + :vartype break_period: int + :ivar lease_duration: Required for acquire. Specifies the duration of the lease, in seconds, or + negative one (-1) for a lease that never expires. + :vartype lease_duration: int + :ivar proposed_lease_id: Optional for acquire, required for change. Proposed lease ID, in a + GUID string format. 
+ :vartype proposed_lease_id: str + """ + + action: Union[str, "_models.LeaseShareAction"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the lease action. Can be one of the available actions. Required. Known values are: + \"Acquire\", \"Renew\", \"Change\", \"Release\", and \"Break\".""" + lease_id: Optional[str] = rest_field(name="leaseId", visibility=["read", "create", "update", "delete", "query"]) + """Identifies the lease. Can be specified in any valid GUID string format.""" + break_period: Optional[int] = rest_field( + name="breakPeriod", visibility=["read", "create", "update", "delete", "query"] + ) + """Optional. For a break action, proposed duration the lease should continue before it is broken, + in seconds, between 0 and 60.""" + lease_duration: Optional[int] = rest_field( + name="leaseDuration", visibility=["read", "create", "update", "delete", "query"] + ) + """Required for acquire. Specifies the duration of the lease, in seconds, or negative one (-1) for + a lease that never expires.""" + proposed_lease_id: Optional[str] = rest_field( + name="proposedLeaseId", visibility=["read", "create", "update", "delete", "query"] + ) + """Optional for acquire, required for change. Proposed lease ID, in a GUID string format.""" + + @overload + def __init__( + self, + *, + action: Union[str, "_models.LeaseShareAction"], + lease_id: Optional[str] = None, + break_period: Optional[int] = None, + lease_duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class LeaseShareResponse(_Model): + """Lease Share response schema. 
+
+ :ivar lease_id: Returned unique lease ID that must be included with any request to delete the
+ share, or to renew, change, or release the lease.
+ :vartype lease_id: str
+ :ivar lease_time_seconds: Approximate time remaining in the lease period, in seconds.
+ :vartype lease_time_seconds: str
+ """
+
+ lease_id: Optional[str] = rest_field(name="leaseId", visibility=["read", "create", "update", "delete", "query"])
+ """Returned unique lease ID that must be included with any request to delete the share, or to
+ renew, change, or release the lease."""
+ lease_time_seconds: Optional[str] = rest_field(
+ name="leaseTimeSeconds", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Approximate time remaining in the lease period, in seconds."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ lease_id: Optional[str] = None,
+ lease_time_seconds: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class LegalHold(_Model):
+ """The LegalHold property of a blob container.
+
+ :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there are at
+ least one existing tag. The hasLegalHold public property is set to false by SRP if all existing
+ legal hold tags are cleared out. There can be a maximum of 1000 blob containers with
+ hasLegalHold=true for a given account.
+ :vartype has_legal_hold: bool
+ :ivar tags: Each tag should be 3 to 23 alphanumeric characters and is normalized to lower case
+ at SRP. Required.
+ :vartype tags: list[str]
+ :ivar allow_protected_append_writes_all: When enabled, new blocks can be written to both
+ 'Append and Block Blobs' while maintaining legal hold protection and compliance. 
Only new blocks
+ can be added and any existing blocks cannot be modified or deleted.
+ :vartype allow_protected_append_writes_all: bool
+ """
+
+ has_legal_hold: Optional[bool] = rest_field(name="hasLegalHold", visibility=["read"])
+ """The hasLegalHold public property is set to true by SRP if there are at least one existing tag.
+ The hasLegalHold public property is set to false by SRP if all existing legal hold tags are
+ cleared out. There can be a maximum of 1000 blob containers with hasLegalHold=true for a given
+ account."""
+ tags: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Each tag should be 3 to 23 alphanumeric characters and is normalized to lower case at SRP.
+ Required."""
+ allow_protected_append_writes_all: Optional[bool] = rest_field(
+ name="allowProtectedAppendWritesAll", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """When enabled, new blocks can be written to both 'Append and Block Blobs' while maintaining legal
+ hold protection and compliance. Only new blocks can be added and any existing blocks cannot be
+ modified or deleted."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ tags: list[str],
+ allow_protected_append_writes_all: Optional[bool] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class LegalHoldProperties(_Model):
+ """The LegalHold property of a blob container.
+
+ :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there are at
+ least one existing tag. The hasLegalHold public property is set to false by SRP if all existing
+ legal hold tags are cleared out. There can be a maximum of 1000 blob containers with
+ hasLegalHold=true for a given account. 
+ :vartype has_legal_hold: bool + :ivar tags: The list of LegalHold tags of a blob container. + :vartype tags: list[~azure.mgmt.storage.models.TagProperty] + :ivar protected_append_writes_history: Protected append blob writes history. + :vartype protected_append_writes_history: + ~azure.mgmt.storage.models.ProtectedAppendWritesHistory + """ + + has_legal_hold: Optional[bool] = rest_field(name="hasLegalHold", visibility=["read"]) + """The hasLegalHold public property is set to true by SRP if there are at least one existing tag. + The hasLegalHold public property is set to false by SRP if all existing legal hold tags are + cleared out. There can be a maximum of 1000 blob containers with hasLegalHold=true for a given + account.""" + tags: Optional[list["_models.TagProperty"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The list of LegalHold tags of a blob container.""" + protected_append_writes_history: Optional["_models.ProtectedAppendWritesHistory"] = rest_field( + name="protectedAppendWritesHistory", visibility=["read", "create", "update", "delete", "query"] + ) + """Protected append blob writes history.""" + + @overload + def __init__( + self, + *, + tags: Optional[list["_models.TagProperty"]] = None, + protected_append_writes_history: Optional["_models.ProtectedAppendWritesHistory"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ListAccountSasResponse(_Model): + """The List SAS credentials operation response. + + :ivar account_sas_token: List SAS credentials of storage account. 
+ :vartype account_sas_token: str + """ + + account_sas_token: Optional[str] = rest_field(name="accountSasToken", visibility=["read"]) + """List SAS credentials of storage account.""" + + +class ListContainerItem(AzureEntityResource): + """The blob container properties be listed out. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar etag: Resource Etag. + :vartype etag: str + :ivar properties: The blob container properties be listed out. + :vartype properties: ~azure.mgmt.storage.models.ContainerProperties + """ + + properties: Optional["_models.ContainerProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The blob container properties be listed out.""" + + __flattened_items = [ + "version", + "deleted", + "deleted_time", + "remaining_retention_days", + "default_encryption_scope", + "deny_encryption_scope_override", + "public_access", + "last_modified_time", + "lease_status", + "lease_state", + "lease_duration", + "metadata", + "immutability_policy", + "legal_hold", + "has_legal_hold", + "has_immutability_policy", + "immutable_storage_with_versioning", + "enable_nfs_v3_root_squash", + "enable_nfs_v3_all_squash", + ] + + @overload + def __init__( + self, + *, + properties: Optional["_models.ContainerProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.properties is None: + return None + return getattr(self.properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.properties is None: + self.properties = self._attr_to_rest_field["properties"]._class_type() + setattr(self.properties, key, value) + else: + super().__setattr__(key, value) + + +class ListQueue(ResourceAutoGenerated): + """ListQueue. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar queue_properties: List Queue resource properties. + :vartype queue_properties: ~azure.mgmt.storage.models.ListQueueProperties + """ + + queue_properties: Optional["_models.ListQueueProperties"] = rest_field( + name="properties", visibility=["read", "create", "update", "delete", "query"] + ) + """List Queue resource properties.""" + + __flattened_items = ["metadata"] + + @overload + def __init__( + self, + *, + queue_properties: Optional["_models.ListQueueProperties"] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.queue_properties is None: + return None + return getattr(self.queue_properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.queue_properties is None: + self.queue_properties = self._attr_to_rest_field["queue_properties"]._class_type() + setattr(self.queue_properties, key, value) + else: + super().__setattr__(key, value) + + +class ListQueueProperties(_Model): + """ListQueueProperties. + + :ivar metadata: A name-value pair that represents queue metadata. + :vartype metadata: dict[str, str] + """ + + metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A name-value pair that represents queue metadata.""" + + @overload + def __init__( + self, + *, + metadata: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ListQueueServices(_Model): + """ListQueueServices. + + :ivar value: List of queue services returned. 
+ :vartype value: list[~azure.mgmt.storage.models.QueueServiceProperties] + """ + + value: Optional[list["_models.QueueServiceProperties"]] = rest_field(visibility=["read"]) + """List of queue services returned.""" + + +class ListServiceSasResponse(_Model): + """The List service SAS credentials operation response. + + :ivar service_sas_token: List service SAS credentials of specific resource. + :vartype service_sas_token: str + """ + + service_sas_token: Optional[str] = rest_field(name="serviceSasToken", visibility=["read"]) + """List service SAS credentials of specific resource.""" + + +class ListTableServices(_Model): + """ListTableServices. + + :ivar value: List of table services returned. + :vartype value: list[~azure.mgmt.storage.models.TableServiceProperties] + """ + + value: Optional[list["_models.TableServiceProperties"]] = rest_field(visibility=["read"]) + """List of table services returned.""" + + +class LocalUser(ProxyResource): + """The local user associated with the storage accounts. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar properties: Storage account local user properties. 
+ :vartype properties: ~azure.mgmt.storage.models.LocalUserProperties + """ + + properties: Optional["_models.LocalUserProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Storage account local user properties.""" + + __flattened_items = [ + "permission_scopes", + "home_directory", + "ssh_authorized_keys", + "sid", + "has_shared_key", + "has_ssh_key", + "has_ssh_password", + "user_id", + "group_id", + "allow_acl_authorization", + "extended_groups", + "is_nf_sv3_enabled", + ] + + @overload + def __init__( + self, + *, + properties: Optional["_models.LocalUserProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.properties is None: + return None + return getattr(self.properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.properties is None: + self.properties = self._attr_to_rest_field["properties"]._class_type() + setattr(self.properties, key, value) + else: + super().__setattr__(key, value) + + +class LocalUserKeys(_Model): + """The Storage Account Local User keys. + + :ivar ssh_authorized_keys: Optional, local user ssh authorized keys for SFTP. + :vartype ssh_authorized_keys: list[~azure.mgmt.storage.models.SshPublicKey] + :ivar shared_key: Auto generated by the server for SMB authentication. 
+ :vartype shared_key: str + """ + + ssh_authorized_keys: Optional[list["_models.SshPublicKey"]] = rest_field( + name="sshAuthorizedKeys", visibility=["read", "create", "update", "delete", "query"] + ) + """Optional, local user ssh authorized keys for SFTP.""" + shared_key: Optional[str] = rest_field(name="sharedKey", visibility=["read"]) + """Auto generated by the server for SMB authentication.""" + + @overload + def __init__( + self, + *, + ssh_authorized_keys: Optional[list["_models.SshPublicKey"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class LocalUserProperties(_Model): + """The Storage Account Local User properties. + + :ivar permission_scopes: The permission scopes of the local user. + :vartype permission_scopes: list[~azure.mgmt.storage.models.PermissionScope] + :ivar home_directory: Optional, local user home directory. + :vartype home_directory: str + :ivar ssh_authorized_keys: Optional, local user ssh authorized keys for SFTP. + :vartype ssh_authorized_keys: list[~azure.mgmt.storage.models.SshPublicKey] + :ivar sid: A unique Security Identifier that is generated by the server. + :vartype sid: str + :ivar has_shared_key: Indicates whether shared key exists. Set it to false to remove existing + shared key. + :vartype has_shared_key: bool + :ivar has_ssh_key: Indicates whether ssh key exists. Set it to false to remove existing SSH + key. + :vartype has_ssh_key: bool + :ivar has_ssh_password: Indicates whether ssh password exists. Set it to false to remove + existing SSH password. + :vartype has_ssh_password: bool + :ivar user_id: A unique Identifier that is generated by the server. + :vartype user_id: int + :ivar group_id: An identifier for associating a group of users. 
+ :vartype group_id: int + :ivar allow_acl_authorization: Indicates whether ACL authorization is allowed for this user. + Set it to false to disallow using ACL authorization. + :vartype allow_acl_authorization: bool + :ivar extended_groups: Supplementary group membership. Only applicable for local users enabled + for NFSv3 access. + :vartype extended_groups: list[int] + :ivar is_nf_sv3_enabled: Indicates if the local user is enabled for access with NFSv3 protocol. + :vartype is_nf_sv3_enabled: bool + """ + + permission_scopes: Optional[list["_models.PermissionScope"]] = rest_field( + name="permissionScopes", visibility=["read", "create", "update", "delete", "query"] + ) + """The permission scopes of the local user.""" + home_directory: Optional[str] = rest_field( + name="homeDirectory", visibility=["read", "create", "update", "delete", "query"] + ) + """Optional, local user home directory.""" + ssh_authorized_keys: Optional[list["_models.SshPublicKey"]] = rest_field( + name="sshAuthorizedKeys", visibility=["read", "create", "update", "delete", "query"] + ) + """Optional, local user ssh authorized keys for SFTP.""" + sid: Optional[str] = rest_field(visibility=["read"]) + """A unique Security Identifier that is generated by the server.""" + has_shared_key: Optional[bool] = rest_field( + name="hasSharedKey", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether shared key exists. Set it to false to remove existing shared key.""" + has_ssh_key: Optional[bool] = rest_field( + name="hasSshKey", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether ssh key exists. Set it to false to remove existing SSH key.""" + has_ssh_password: Optional[bool] = rest_field( + name="hasSshPassword", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether ssh password exists. 
Set it to false to remove existing SSH password.""" + user_id: Optional[int] = rest_field(name="userId", visibility=["read"]) + """A unique Identifier that is generated by the server.""" + group_id: Optional[int] = rest_field(name="groupId", visibility=["read", "create", "update", "delete", "query"]) + """An identifier for associating a group of users.""" + allow_acl_authorization: Optional[bool] = rest_field( + name="allowAclAuthorization", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether ACL authorization is allowed for this user. Set it to false to disallow using + ACL authorization.""" + extended_groups: Optional[list[int]] = rest_field( + name="extendedGroups", visibility=["read", "create", "update", "delete", "query"] + ) + """Supplementary group membership. Only applicable for local users enabled for NFSv3 access.""" + is_nf_sv3_enabled: Optional[bool] = rest_field( + name="isNFSv3Enabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates if the local user is enabled for access with NFSv3 protocol.""" + + @overload + def __init__( + self, + *, + permission_scopes: Optional[list["_models.PermissionScope"]] = None, + home_directory: Optional[str] = None, + ssh_authorized_keys: Optional[list["_models.SshPublicKey"]] = None, + has_shared_key: Optional[bool] = None, + has_ssh_key: Optional[bool] = None, + has_ssh_password: Optional[bool] = None, + group_id: Optional[int] = None, + allow_acl_authorization: Optional[bool] = None, + extended_groups: Optional[list[int]] = None, + is_nf_sv3_enabled: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class LocalUserRegeneratePasswordResult(_Model): + """The secrets of Storage Account Local User. 
+ + :ivar ssh_password: Auto generated password by the server for SSH authentication if + hasSshPassword is set to true on the creation of local user. + :vartype ssh_password: str + """ + + ssh_password: Optional[str] = rest_field(name="sshPassword", visibility=["read"]) + """Auto generated password by the server for SSH authentication if hasSshPassword is set to true + on the creation of local user.""" + + +class StorageConnectorAuthProperties(_Model): + """The authentication properties of the backing data source. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ManagedIdentityAuthProperties + + :ivar type: Type of the authentication properties. Controls the type of the authProperties + object. Required. "ManagedIdentity" + :vartype type: str or ~azure.mgmt.storage.models.StorageConnectorAuthType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of the authentication properties. Controls the type of the authProperties object. + Required. \"ManagedIdentity\"""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ManagedIdentityAuthProperties(StorageConnectorAuthProperties, discriminator="ManagedIdentity"): + """The managed identity auth properties for dataShare connection. + + :ivar type: The type of managed identity auth. Required. Managed Identity auth type. + :vartype type: str or ~azure.mgmt.storage.models.MANAGED_IDENTITY + :ivar identity_resource_id: ARM ResourceId of the managed identity that should be used to + authenticate to the backing data source. 
+ :vartype identity_resource_id: str + """ + + type: Literal[StorageConnectorAuthType.MANAGED_IDENTITY] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The type of managed identity auth. Required. Managed Identity auth type.""" + identity_resource_id: Optional[str] = rest_field( + name="identityResourceId", visibility=["read", "create", "update", "delete", "query"] + ) + """ARM ResourceId of the managed identity that should be used to authenticate to the backing data + source.""" + + @overload + def __init__( + self, + *, + identity_resource_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = StorageConnectorAuthType.MANAGED_IDENTITY # type: ignore + + +class ManagementPolicy(ProxyResource): + """The Get Storage Account ManagementPolicies operation response. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar properties: Returns the Storage Account Data Policies Rules. 
+ :vartype properties: ~azure.mgmt.storage.models.ManagementPolicyProperties + """ + + properties: Optional["_models.ManagementPolicyProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Returns the Storage Account Data Policies Rules.""" + + __flattened_items = ["last_modified_time", "policy"] + + @overload + def __init__( + self, + *, + properties: Optional["_models.ManagementPolicyProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.properties is None: + return None + return getattr(self.properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.properties is None: + self.properties = self._attr_to_rest_field["properties"]._class_type() + setattr(self.properties, key, value) + else: + super().__setattr__(key, value) + + +class ManagementPolicyAction(_Model): + """Actions are applied to the filtered blobs when the execution condition is met. + + :ivar base_blob: The management policy action for base blob. + :vartype base_blob: ~azure.mgmt.storage.models.ManagementPolicyBaseBlob + :ivar snapshot: The management policy action for snapshot. + :vartype snapshot: ~azure.mgmt.storage.models.ManagementPolicySnapShot + :ivar version: The management policy action for version. 
+ :vartype version: ~azure.mgmt.storage.models.ManagementPolicyVersion + """ + + base_blob: Optional["_models.ManagementPolicyBaseBlob"] = rest_field( + name="baseBlob", visibility=["read", "create", "update", "delete", "query"] + ) + """The management policy action for base blob.""" + snapshot: Optional["_models.ManagementPolicySnapShot"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The management policy action for snapshot.""" + version: Optional["_models.ManagementPolicyVersion"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The management policy action for version.""" + + @overload + def __init__( + self, + *, + base_blob: Optional["_models.ManagementPolicyBaseBlob"] = None, + snapshot: Optional["_models.ManagementPolicySnapShot"] = None, + version: Optional["_models.ManagementPolicyVersion"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ManagementPolicyBaseBlob(_Model): + """Management policy action for base blob. + + :ivar tier_to_cool: The function to tier blobs to cool storage. + :vartype tier_to_cool: ~azure.mgmt.storage.models.DateAfterModification + :ivar tier_to_archive: The function to tier blobs to archive storage. + :vartype tier_to_archive: ~azure.mgmt.storage.models.DateAfterModification + :ivar tier_to_cold: The function to tier blobs to cold storage. + :vartype tier_to_cold: ~azure.mgmt.storage.models.DateAfterModification + :ivar tier_to_hot: The function to tier blobs to hot storage. This action can only be used with + Premium Block Blob Storage Accounts. + :vartype tier_to_hot: ~azure.mgmt.storage.models.DateAfterModification + :ivar delete: The function to delete the blob. 
+ :vartype delete: ~azure.mgmt.storage.models.DateAfterModification + :ivar enable_auto_tier_to_hot_from_cool: This property enables auto tiering of a blob from cool + to hot on a blob access. This property requires tierToCool.daysAfterLastAccessTimeGreaterThan. + :vartype enable_auto_tier_to_hot_from_cool: bool + """ + + tier_to_cool: Optional["_models.DateAfterModification"] = rest_field( + name="tierToCool", visibility=["read", "create", "update", "delete", "query"] + ) + """The function to tier blobs to cool storage.""" + tier_to_archive: Optional["_models.DateAfterModification"] = rest_field( + name="tierToArchive", visibility=["read", "create", "update", "delete", "query"] + ) + """The function to tier blobs to archive storage.""" + tier_to_cold: Optional["_models.DateAfterModification"] = rest_field( + name="tierToCold", visibility=["read", "create", "update", "delete", "query"] + ) + """The function to tier blobs to cold storage.""" + tier_to_hot: Optional["_models.DateAfterModification"] = rest_field( + name="tierToHot", visibility=["read", "create", "update", "delete", "query"] + ) + """The function to tier blobs to hot storage. This action can only be used with Premium Block Blob + Storage Accounts.""" + delete: Optional["_models.DateAfterModification"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The function to delete the blob.""" + enable_auto_tier_to_hot_from_cool: Optional[bool] = rest_field( + name="enableAutoTierToHotFromCool", visibility=["read", "create", "update", "delete", "query"] + ) + """This property enables auto tiering of a blob from cool to hot on a blob access. 
This property + requires tierToCool.daysAfterLastAccessTimeGreaterThan.""" + + @overload + def __init__( + self, + *, + tier_to_cool: Optional["_models.DateAfterModification"] = None, + tier_to_archive: Optional["_models.DateAfterModification"] = None, + tier_to_cold: Optional["_models.DateAfterModification"] = None, + tier_to_hot: Optional["_models.DateAfterModification"] = None, + delete: Optional["_models.DateAfterModification"] = None, + enable_auto_tier_to_hot_from_cool: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ManagementPolicyDefinition(_Model): + """An object that defines the Lifecycle rule. Each definition is made up with a filters set and an + actions set. + + :ivar actions: An object that defines the action set. Required. + :vartype actions: ~azure.mgmt.storage.models.ManagementPolicyAction + :ivar filters: An object that defines the filter set. + :vartype filters: ~azure.mgmt.storage.models.ManagementPolicyFilter + """ + + actions: "_models.ManagementPolicyAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An object that defines the action set. Required.""" + filters: Optional["_models.ManagementPolicyFilter"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """An object that defines the filter set.""" + + @overload + def __init__( + self, + *, + actions: "_models.ManagementPolicyAction", + filters: Optional["_models.ManagementPolicyFilter"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ManagementPolicyFilter(_Model): + """Filters limit rule actions to a subset of blobs within the storage account. If multiple filters + are defined, a logical AND is performed on all filters. + + :ivar prefix_match: An array of strings for prefixes to be match. + :vartype prefix_match: list[str] + :ivar blob_types: An array of predefined enum values. Currently blockBlob supports all tiering + and delete actions. Only delete actions are supported for appendBlob. Required. + :vartype blob_types: list[str] + :ivar blob_index_match: An array of blob index tag based filters, there can be at most 10 tag + filters. + :vartype blob_index_match: list[~azure.mgmt.storage.models.TagFilter] + """ + + prefix_match: Optional[list[str]] = rest_field( + name="prefixMatch", visibility=["read", "create", "update", "delete", "query"] + ) + """An array of strings for prefixes to be match.""" + blob_types: list[str] = rest_field(name="blobTypes", visibility=["read", "create", "update", "delete", "query"]) + """An array of predefined enum values. Currently blockBlob supports all tiering and delete + actions. Only delete actions are supported for appendBlob. Required.""" + blob_index_match: Optional[list["_models.TagFilter"]] = rest_field( + name="blobIndexMatch", visibility=["read", "create", "update", "delete", "query"] + ) + """An array of blob index tag based filters, there can be at most 10 tag filters.""" + + @overload + def __init__( + self, + *, + blob_types: list[str], + prefix_match: Optional[list[str]] = None, + blob_index_match: Optional[list["_models.TagFilter"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ManagementPolicyProperties(_Model): + """The Storage Account ManagementPolicy properties. + + :ivar last_modified_time: Returns the date and time the ManagementPolicies was last modified. + :vartype last_modified_time: ~datetime.datetime + :ivar policy: The Storage Account ManagementPolicy, in JSON format. See more details in: + `https://learn.microsoft.com/azure/storage/blobs/lifecycle-management-overview + `_. Required. + :vartype policy: ~azure.mgmt.storage.models.ManagementPolicySchema + """ + + last_modified_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedTime", visibility=["read"], format="rfc3339" + ) + """Returns the date and time the ManagementPolicies was last modified.""" + policy: "_models.ManagementPolicySchema" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The Storage Account ManagementPolicy, in JSON format. See more details in: + `https://learn.microsoft.com/azure/storage/blobs/lifecycle-management-overview + `_. Required.""" + + @overload + def __init__( + self, + *, + policy: "_models.ManagementPolicySchema", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ManagementPolicyRule(_Model): + """An object that wraps the Lifecycle rule. Each rule is uniquely defined by name. + + :ivar enabled: Rule is enabled if set to true. + :vartype enabled: bool + :ivar name: A rule name can contain any combination of alpha numeric characters. Rule name is + case-sensitive. It must be unique within a policy. Required. + :vartype name: str + :ivar type: The valid value is Lifecycle. Required. 
"Lifecycle" + :vartype type: str or ~azure.mgmt.storage.models.RuleType + :ivar definition: An object that defines the Lifecycle rule. Required. + :vartype definition: ~azure.mgmt.storage.models.ManagementPolicyDefinition + """ + + enabled: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Rule is enabled if set to true.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A rule name can contain any combination of alpha numeric characters. Rule name is + case-sensitive. It must be unique within a policy. Required.""" + type: Union[str, "_models.RuleType"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The valid value is Lifecycle. Required. \"Lifecycle\"""" + definition: "_models.ManagementPolicyDefinition" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """An object that defines the Lifecycle rule. Required.""" + + @overload + def __init__( + self, + *, + name: str, + type: Union[str, "_models.RuleType"], + definition: "_models.ManagementPolicyDefinition", + enabled: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ManagementPolicySchema(_Model): + """The Storage Account ManagementPolicies Rules. See more details in: + `https://learn.microsoft.com/azure/storage/blobs/lifecycle-management-overview + `_. + + :ivar rules: The Storage Account ManagementPolicies Rules. See more details in: + `https://learn.microsoft.com/azure/storage/blobs/lifecycle-management-overview + `_. Required. 
+ :vartype rules: list[~azure.mgmt.storage.models.ManagementPolicyRule] + """ + + rules: list["_models.ManagementPolicyRule"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The Storage Account ManagementPolicies Rules. See more details in: + `https://learn.microsoft.com/azure/storage/blobs/lifecycle-management-overview + `_. Required.""" + + @overload + def __init__( + self, + *, + rules: list["_models.ManagementPolicyRule"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ManagementPolicySnapShot(_Model): + """Management policy action for snapshot. + + :ivar tier_to_cool: The function to tier blob snapshot to cool storage. + :vartype tier_to_cool: ~azure.mgmt.storage.models.DateAfterCreation + :ivar tier_to_archive: The function to tier blob snapshot to archive storage. + :vartype tier_to_archive: ~azure.mgmt.storage.models.DateAfterCreation + :ivar tier_to_cold: The function to tier blobs to cold storage. + :vartype tier_to_cold: ~azure.mgmt.storage.models.DateAfterCreation + :ivar tier_to_hot: The function to tier blobs to hot storage. This action can only be used with + Premium Block Blob Storage Accounts. + :vartype tier_to_hot: ~azure.mgmt.storage.models.DateAfterCreation + :ivar delete: The function to delete the blob snapshot. 
+ :vartype delete: ~azure.mgmt.storage.models.DateAfterCreation + """ + + tier_to_cool: Optional["_models.DateAfterCreation"] = rest_field( + name="tierToCool", visibility=["read", "create", "update", "delete", "query"] + ) + """The function to tier blob snapshot to cool storage.""" + tier_to_archive: Optional["_models.DateAfterCreation"] = rest_field( + name="tierToArchive", visibility=["read", "create", "update", "delete", "query"] + ) + """The function to tier blob snapshot to archive storage.""" + tier_to_cold: Optional["_models.DateAfterCreation"] = rest_field( + name="tierToCold", visibility=["read", "create", "update", "delete", "query"] + ) + """The function to tier blobs to cold storage.""" + tier_to_hot: Optional["_models.DateAfterCreation"] = rest_field( + name="tierToHot", visibility=["read", "create", "update", "delete", "query"] + ) + """The function to tier blobs to hot storage. This action can only be used with Premium Block Blob + Storage Accounts.""" + delete: Optional["_models.DateAfterCreation"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The function to delete the blob snapshot.""" + + @overload + def __init__( + self, + *, + tier_to_cool: Optional["_models.DateAfterCreation"] = None, + tier_to_archive: Optional["_models.DateAfterCreation"] = None, + tier_to_cold: Optional["_models.DateAfterCreation"] = None, + tier_to_hot: Optional["_models.DateAfterCreation"] = None, + delete: Optional["_models.DateAfterCreation"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ManagementPolicyVersion(_Model): + """Management policy action for blob version. + + :ivar tier_to_cool: The function to tier blob version to cool storage. 
+ :vartype tier_to_cool: ~azure.mgmt.storage.models.DateAfterCreation + :ivar tier_to_archive: The function to tier blob version to archive storage. + :vartype tier_to_archive: ~azure.mgmt.storage.models.DateAfterCreation + :ivar tier_to_cold: The function to tier blobs to cold storage. + :vartype tier_to_cold: ~azure.mgmt.storage.models.DateAfterCreation + :ivar tier_to_hot: The function to tier blobs to hot storage. This action can only be used with + Premium Block Blob Storage Accounts. + :vartype tier_to_hot: ~azure.mgmt.storage.models.DateAfterCreation + :ivar delete: The function to delete the blob version. + :vartype delete: ~azure.mgmt.storage.models.DateAfterCreation + """ + + tier_to_cool: Optional["_models.DateAfterCreation"] = rest_field( + name="tierToCool", visibility=["read", "create", "update", "delete", "query"] + ) + """The function to tier blob version to cool storage.""" + tier_to_archive: Optional["_models.DateAfterCreation"] = rest_field( + name="tierToArchive", visibility=["read", "create", "update", "delete", "query"] + ) + """The function to tier blob version to archive storage.""" + tier_to_cold: Optional["_models.DateAfterCreation"] = rest_field( + name="tierToCold", visibility=["read", "create", "update", "delete", "query"] + ) + """The function to tier blobs to cold storage.""" + tier_to_hot: Optional["_models.DateAfterCreation"] = rest_field( + name="tierToHot", visibility=["read", "create", "update", "delete", "query"] + ) + """The function to tier blobs to hot storage. 
This action can only be used with Premium Block Blob + Storage Accounts.""" + delete: Optional["_models.DateAfterCreation"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The function to delete the blob version.""" + + @overload + def __init__( + self, + *, + tier_to_cool: Optional["_models.DateAfterCreation"] = None, + tier_to_archive: Optional["_models.DateAfterCreation"] = None, + tier_to_cold: Optional["_models.DateAfterCreation"] = None, + tier_to_hot: Optional["_models.DateAfterCreation"] = None, + delete: Optional["_models.DateAfterCreation"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MetricSpecification(_Model): + """Metric specification of operation. + + :ivar name: Name of metric specification. + :vartype name: str + :ivar display_name: Display name of metric specification. + :vartype display_name: str + :ivar display_description: Display description of metric specification. + :vartype display_description: str + :ivar unit: Unit could be Bytes or Count. + :vartype unit: str + :ivar dimensions: Dimensions of blobs, including blob type and access tier. + :vartype dimensions: list[~azure.mgmt.storage.models.Dimension] + :ivar aggregation_type: Aggregation type could be Average. + :vartype aggregation_type: str + :ivar fill_gap_with_zero: The property to decide fill gap with zero or not. + :vartype fill_gap_with_zero: bool + :ivar category: The category this metric specification belong to, could be Capacity. + :vartype category: str + :ivar resource_id_dimension_name_override: Account Resource Id. 
    :vartype resource_id_dimension_name_override: str
    """

    name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Name of metric specification."""
    display_name: Optional[str] = rest_field(
        name="displayName", visibility=["read", "create", "update", "delete", "query"]
    )
    """Display name of metric specification."""
    display_description: Optional[str] = rest_field(
        name="displayDescription", visibility=["read", "create", "update", "delete", "query"]
    )
    """Display description of metric specification."""
    unit: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Unit could be Bytes or Count."""
    dimensions: Optional[list["_models.Dimension"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Dimensions of blobs, including blob type and access tier."""
    aggregation_type: Optional[str] = rest_field(
        name="aggregationType", visibility=["read", "create", "update", "delete", "query"]
    )
    """Aggregation type could be Average."""
    fill_gap_with_zero: Optional[bool] = rest_field(
        name="fillGapWithZero", visibility=["read", "create", "update", "delete", "query"]
    )
    """The property to decide fill gap with zero or not."""
    category: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The category this metric specification belongs to, could be Capacity."""
    resource_id_dimension_name_override: Optional[str] = rest_field(
        name="resourceIdDimensionNameOverride", visibility=["read", "create", "update", "delete", "query"]
    )
    """Account Resource Id."""

    @overload
    def __init__(
        self,
        *,
        name: Optional[str] = None,
        display_name: Optional[str] = None,
        display_description: Optional[str] = None,
        unit: Optional[str] = None,
        dimensions: Optional[list["_models.Dimension"]] = None,
        aggregation_type: Optional[str] = None,
        fill_gap_with_zero: Optional[bool] = None,
        category: Optional[str] = None,
        resource_id_dimension_name_override: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class Multichannel(_Model):
    """Multichannel setting. Applies to Premium FileStorage only.

    :ivar enabled: Indicates whether multichannel is enabled.
    :vartype enabled: bool
    """

    enabled: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Indicates whether multichannel is enabled."""

    @overload
    def __init__(
        self,
        *,
        enabled: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class NetworkRuleSet(_Model):
    """Network rule set.

    :ivar bypass: Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. Possible
     values are any combination of Logging|Metrics|AzureServices (For example, "Logging, Metrics"),
     or None to bypass none of those traffics. Known values are: "None", "Logging", "Metrics", and
     "AzureServices".
    :vartype bypass: str or ~azure.mgmt.storage.models.Bypass
    :ivar resource_access_rules: Sets the resource access rules.
    :vartype resource_access_rules: list[~azure.mgmt.storage.models.ResourceAccessRule]
    :ivar virtual_network_rules: Sets the virtual network rules.
    :vartype virtual_network_rules: list[~azure.mgmt.storage.models.VirtualNetworkRule]
    :ivar ip_rules: Sets the IP ACL rules.
    :vartype ip_rules: list[~azure.mgmt.storage.models.IPRule]
    :ivar ipv6_rules: Sets the IPv6 ACL rules.
    :vartype ipv6_rules: list[~azure.mgmt.storage.models.IPRule]
    :ivar default_action: Specifies the default action of allow or deny when no other rules match.
     Required. Known values are: "Allow" and "Deny".
    :vartype default_action: str or ~azure.mgmt.storage.models.DefaultAction
    """

    bypass: Optional[Union[str, "_models.Bypass"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. Possible values are
    any combination of Logging|Metrics|AzureServices (For example, \"Logging, Metrics\"), or None
    to bypass none of those traffics. Known values are: \"None\", \"Logging\", \"Metrics\", and
    \"AzureServices\"."""
    resource_access_rules: Optional[list["_models.ResourceAccessRule"]] = rest_field(
        name="resourceAccessRules", visibility=["read", "create", "update", "delete", "query"]
    )
    """Sets the resource access rules."""
    virtual_network_rules: Optional[list["_models.VirtualNetworkRule"]] = rest_field(
        name="virtualNetworkRules", visibility=["read", "create", "update", "delete", "query"]
    )
    """Sets the virtual network rules."""
    ip_rules: Optional[list["_models.IPRule"]] = rest_field(
        name="ipRules", visibility=["read", "create", "update", "delete", "query"]
    )
    """Sets the IP ACL rules."""
    ipv6_rules: Optional[list["_models.IPRule"]] = rest_field(
        name="ipv6Rules", visibility=["read", "create", "update", "delete", "query"]
    )
    """Sets the IPv6 ACL rules."""
    default_action: Union[str, "_models.DefaultAction"] = rest_field(
        name="defaultAction", visibility=["read", "create", "update", "delete", "query"]
    )
    """Specifies the default action of allow or deny when no other rules match. Required. Known values
    are: \"Allow\" and \"Deny\"."""

    @overload
    def __init__(
        self,
        *,
        default_action: Union[str, "_models.DefaultAction"],
        bypass: Optional[Union[str, "_models.Bypass"]] = None,
        resource_access_rules: Optional[list["_models.ResourceAccessRule"]] = None,
        virtual_network_rules: Optional[list["_models.VirtualNetworkRule"]] = None,
        ip_rules: Optional[list["_models.IPRule"]] = None,
        ipv6_rules: Optional[list["_models.IPRule"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class NetworkSecurityPerimeter(_Model):
    """NetworkSecurityPerimeter related information.

    :ivar id: The ARM identifier of the resource.
    :vartype id: str
    :ivar perimeter_guid: Guid of the resource.
    :vartype perimeter_guid: str
    :ivar location: Location of the resource.
    :vartype location: str
    """

    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ARM identifier of the resource."""
    perimeter_guid: Optional[str] = rest_field(
        name="perimeterGuid", visibility=["read", "create", "update", "delete", "query"]
    )
    """Guid of the resource."""
    location: Optional[str] = rest_field(visibility=["read", "create"])
    """Location of the resource."""

    @overload
    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        perimeter_guid: Optional[str] = None,
        location: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class NetworkSecurityPerimeterConfiguration(ProxyResource):
    """The Network Security Perimeter configuration resource.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.storage.models.SystemData
    :ivar properties: Properties of the Network Security Perimeter Configuration.
    :vartype properties: ~azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationProperties
    """

    properties: Optional["_models.NetworkSecurityPerimeterConfigurationProperties"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Properties of the Network Security Perimeter Configuration."""

    # Property names forwarded to/from `properties` so callers can use the
    # pre-flattening attribute access (e.g. `cfg.provisioning_state`).
    __flattened_items = [
        "provisioning_state",
        "provisioning_issues",
        "network_security_perimeter",
        "resource_association",
        "profile",
    ]

    @overload
    def __init__(
        self,
        *,
        properties: Optional["_models.NetworkSecurityPerimeterConfigurationProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Pull flattened keyword args out before the base init, then assign
        # them so they land on the nested `properties` model via __setattr__.
        _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}
        super().__init__(*args, **kwargs)
        for k, v in _flattened_input.items():
            setattr(self, k, v)

    def __getattr__(self, name: str) -> Any:
        if name in self.__flattened_items:
            if self.properties is None:
                return None
            return getattr(self.properties, name)
        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")

    def __setattr__(self, key: str, value: Any) -> None:
        if key in self.__flattened_items:
            if self.properties is None:
                self.properties = self._attr_to_rest_field["properties"]._class_type()
            setattr(self.properties, key, value)
        else:
            super().__setattr__(key, value)


class NetworkSecurityPerimeterConfigurationProperties(_Model):  # pylint: disable=name-too-long
    """Properties of the Network Security Perimeter Configuration.

    :ivar provisioning_state: Provisioning state of Network Security Perimeter configuration
     propagation. Known values are: "Accepted", "Succeeded", "Failed", "Deleting", and "Canceled".
    :vartype provisioning_state: str or
     ~azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationProvisioningState
    :ivar provisioning_issues: List of Provisioning Issues if any.
    :vartype provisioning_issues: list[~azure.mgmt.storage.models.ProvisioningIssue]
    :ivar network_security_perimeter: NetworkSecurityPerimeter related information.
    :vartype network_security_perimeter: ~azure.mgmt.storage.models.NetworkSecurityPerimeter
    :ivar resource_association: Information about resource association.
    :vartype resource_association:
     ~azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationPropertiesResourceAssociation
    :ivar profile: Network Security Perimeter profile.
    :vartype profile:
     ~azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationPropertiesProfile
    """

    # All fields below are service-populated (visibility=["read"]), so this
    # model has no keyword constructor overloads.
    provisioning_state: Optional[Union[str, "_models.NetworkSecurityPerimeterConfigurationProvisioningState"]] = (
        rest_field(name="provisioningState", visibility=["read"])
    )
    """Provisioning state of Network Security Perimeter configuration propagation. Known values are:
    \"Accepted\", \"Succeeded\", \"Failed\", \"Deleting\", and \"Canceled\"."""
    provisioning_issues: Optional[list["_models.ProvisioningIssue"]] = rest_field(
        name="provisioningIssues", visibility=["read"]
    )
    """List of Provisioning Issues if any."""
    network_security_perimeter: Optional["_models.NetworkSecurityPerimeter"] = rest_field(
        name="networkSecurityPerimeter", visibility=["read"]
    )
    """NetworkSecurityPerimeter related information."""
    resource_association: Optional["_models.NetworkSecurityPerimeterConfigurationPropertiesResourceAssociation"] = (
        rest_field(name="resourceAssociation", visibility=["read"])
    )
    """Information about resource association."""
    profile: Optional["_models.NetworkSecurityPerimeterConfigurationPropertiesProfile"] = rest_field(
        visibility=["read"]
    )
    """Network Security Perimeter profile."""


class NetworkSecurityPerimeterConfigurationPropertiesProfile(_Model):  # pylint: disable=name-too-long
    """Network Security Perimeter profile.

    :ivar name: Name of the resource.
    :vartype name: str
    :ivar access_rules_version: Current access rules version.
    :vartype access_rules_version: float
    :ivar access_rules: List of Access Rules.
    :vartype access_rules: list[~azure.mgmt.storage.models.NspAccessRule]
    :ivar diagnostic_settings_version: Diagnostic settings version.
    :vartype diagnostic_settings_version: float
    :ivar enabled_log_categories: Enabled logging categories.
    :vartype enabled_log_categories: list[str]
    """

    name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Name of the resource."""
    access_rules_version: Optional[float] = rest_field(
        name="accessRulesVersion", visibility=["read", "create", "update", "delete", "query"]
    )
    """Current access rules version."""
    access_rules: Optional[list["_models.NspAccessRule"]] = rest_field(
        name="accessRules", visibility=["read", "create", "update", "delete", "query"]
    )
    """List of Access Rules."""
    diagnostic_settings_version: Optional[float] = rest_field(
        name="diagnosticSettingsVersion", visibility=["read", "create", "update", "delete", "query"]
    )
    """Diagnostic settings version."""
    enabled_log_categories: Optional[list[str]] = rest_field(
        name="enabledLogCategories", visibility=["read", "create", "update", "delete", "query"]
    )
    """Enabled logging categories."""

    @overload
    def __init__(
        self,
        *,
        name: Optional[str] = None,
        access_rules_version: Optional[float] = None,
        access_rules: Optional[list["_models.NspAccessRule"]] = None,
        diagnostic_settings_version: Optional[float] = None,
        enabled_log_categories: Optional[list[str]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class NetworkSecurityPerimeterConfigurationPropertiesResourceAssociation(_Model):  # pylint: disable=name-too-long
    """Information about resource association.

    :ivar name: Name of the resource association.
    :vartype name: str
    :ivar access_mode: Access Mode of the resource association. Known values are: "Enforced",
     "Learning", and "Audit".
    :vartype access_mode: str or ~azure.mgmt.storage.models.ResourceAssociationAccessMode
    """

    name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Name of the resource association."""
    access_mode: Optional[Union[str, "_models.ResourceAssociationAccessMode"]] = rest_field(
        name="accessMode", visibility=["read", "create", "update", "delete", "query"]
    )
    """Access Mode of the resource association. Known values are: \"Enforced\", \"Learning\", and
    \"Audit\"."""

    @overload
    def __init__(
        self,
        *,
        name: Optional[str] = None,
        access_mode: Optional[Union[str, "_models.ResourceAssociationAccessMode"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class NfsSetting(_Model):
    """Setting for NFS protocol.

    :ivar encryption_in_transit: Encryption in transit setting.
    :vartype encryption_in_transit: ~azure.mgmt.storage.models.EncryptionInTransit
    """

    encryption_in_transit: Optional["_models.EncryptionInTransit"] = rest_field(
        name="encryptionInTransit", visibility=["read", "create", "update", "delete", "query"]
    )
    """Encryption in transit setting."""

    @overload
    def __init__(
        self,
        *,
        encryption_in_transit: Optional["_models.EncryptionInTransit"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class NspAccessRule(_Model):
    """Information of Access Rule in Network Security Perimeter profile.

    :ivar name: Name of the resource.
    :vartype name: str
    :ivar properties: Properties of Access Rule.
    :vartype properties: ~azure.mgmt.storage.models.NspAccessRuleProperties
    """

    name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Name of the resource."""
    # `properties` is read-only (service-populated), hence absent from the
    # keyword constructor overload below.
    properties: Optional["_models.NspAccessRuleProperties"] = rest_field(visibility=["read"])
    """Properties of Access Rule."""

    @overload
    def __init__(
        self,
        *,
        name: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class NspAccessRuleProperties(_Model):
    """Properties of Access Rule.

    :ivar direction: Direction of Access Rule. Known values are: "Inbound" and "Outbound".
    :vartype direction: str or ~azure.mgmt.storage.models.NspAccessRuleDirection
    :ivar address_prefixes: Address prefixes in the CIDR format for inbound rules.
    :vartype address_prefixes: list[str]
    :ivar subscriptions: Subscriptions for inbound rules.
    :vartype subscriptions:
     list[~azure.mgmt.storage.models.NspAccessRulePropertiesSubscriptionsItem]
    :ivar network_security_perimeters: NetworkSecurityPerimeters for inbound rules.
    :vartype network_security_perimeters: list[~azure.mgmt.storage.models.NetworkSecurityPerimeter]
    :ivar fully_qualified_domain_names: FQDN for outbound rules.
    :vartype fully_qualified_domain_names: list[str]
    """

    direction: Optional[Union[str, "_models.NspAccessRuleDirection"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Direction of Access Rule.
    Known values are: \"Inbound\" and \"Outbound\"."""
    address_prefixes: Optional[list[str]] = rest_field(
        name="addressPrefixes", visibility=["read", "create", "update", "delete", "query"]
    )
    """Address prefixes in the CIDR format for inbound rules."""
    subscriptions: Optional[list["_models.NspAccessRulePropertiesSubscriptionsItem"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Subscriptions for inbound rules."""
    network_security_perimeters: Optional[list["_models.NetworkSecurityPerimeter"]] = rest_field(
        name="networkSecurityPerimeters", visibility=["read"]
    )
    """NetworkSecurityPerimeters for inbound rules."""
    fully_qualified_domain_names: Optional[list[str]] = rest_field(
        name="fullyQualifiedDomainNames", visibility=["read"]
    )
    """FQDN for outbound rules."""

    @overload
    def __init__(
        self,
        *,
        direction: Optional[Union[str, "_models.NspAccessRuleDirection"]] = None,
        address_prefixes: Optional[list[str]] = None,
        subscriptions: Optional[list["_models.NspAccessRulePropertiesSubscriptionsItem"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class NspAccessRulePropertiesSubscriptionsItem(_Model):
    """Subscription for inbound rule.

    :ivar id: The ARM identifier of subscription.
    :vartype id: str
    """

    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ARM identifier of subscription."""

    @overload
    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ObjectReplicationPolicy(ProxyResource):
    """The replication policy between two storage accounts. Multiple rules can be defined in one
    policy.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.storage.models.SystemData
    :ivar properties: Returns the Storage Account Object Replication Policy.
    :vartype properties: ~azure.mgmt.storage.models.ObjectReplicationPolicyProperties
    """

    properties: Optional["_models.ObjectReplicationPolicyProperties"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Returns the Storage Account Object Replication Policy."""

    # Property names forwarded to/from `properties` for backward-compatible
    # flattened attribute access (e.g. `policy.source_account`).
    __flattened_items = [
        "policy_id",
        "enabled_time",
        "source_account",
        "destination_account",
        "rules",
        "metrics",
        "priority_replication",
        "tags_replication",
    ]

    @overload
    def __init__(
        self,
        *,
        properties: Optional["_models.ObjectReplicationPolicyProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Pull flattened keyword args out before the base init, then assign
        # them so they land on the nested `properties` model via __setattr__.
        _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}
        super().__init__(*args, **kwargs)
        for k, v in _flattened_input.items():
            setattr(self, k, v)

    def __getattr__(self, name: str) -> Any:
        if name in self.__flattened_items:
            if self.properties is None:
                return None
            return getattr(self.properties, name)
        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")

    def __setattr__(self, key: str, value: Any) -> None:
        if key in self.__flattened_items:
            if self.properties is None:
                self.properties = self._attr_to_rest_field["properties"]._class_type()
            setattr(self.properties, key, value)
        else:
            super().__setattr__(key, value)


class ObjectReplicationPolicyFilter(_Model):
    """Filters limit replication to a subset of blobs within the storage account. A logical OR is
    performed on values in the filter. If multiple filters are defined, a logical AND is performed
    on all filters.

    :ivar prefix_match: Optional. Filters the results to replicate only blobs whose names begin
     with the specified prefix.
    :vartype prefix_match: list[str]
    :ivar min_creation_time: Blobs created after the time will be replicated to the destination. It
     must be in datetime format 'yyyy-MM-ddTHH:mm:ssZ'. Example: 2020-02-19T16:05:00Z.
    :vartype min_creation_time: str
    """

    prefix_match: Optional[list[str]] = rest_field(
        name="prefixMatch", visibility=["read", "create", "update", "delete", "query"]
    )
    """Optional. Filters the results to replicate only blobs whose names begin with the specified
    prefix."""
    min_creation_time: Optional[str] = rest_field(
        name="minCreationTime", visibility=["read", "create", "update", "delete", "query"]
    )
    """Blobs created after the time will be replicated to the destination. It must be in datetime
    format 'yyyy-MM-ddTHH:mm:ssZ'. Example: 2020-02-19T16:05:00Z."""

    @overload
    def __init__(
        self,
        *,
        prefix_match: Optional[list[str]] = None,
        min_creation_time: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ObjectReplicationPolicyProperties(_Model):
    """The Storage Account ObjectReplicationPolicy properties.

    :ivar policy_id: A unique id for object replication policy.
    :vartype policy_id: str
    :ivar enabled_time: Indicates when the policy is enabled on the source account.
    :vartype enabled_time: ~datetime.datetime
    :ivar source_account: Required. Source account name. It should be full resource id if
     allowCrossTenantReplication set to false. Required.
    :vartype source_account: str
    :ivar destination_account: Required. Destination account name. It should be full resource id if
     allowCrossTenantReplication set to false. Required.
    :vartype destination_account: str
    :ivar rules: The storage account object replication rules.
    :vartype rules: list[~azure.mgmt.storage.models.ObjectReplicationPolicyRule]
    :ivar metrics: Optional. The object replication policy metrics feature options.
    :vartype metrics: ~azure.mgmt.storage.models.ObjectReplicationPolicyPropertiesMetrics
    :ivar priority_replication: Optional. The object replication policy priority replication
     feature options.
    :vartype priority_replication:
     ~azure.mgmt.storage.models.ObjectReplicationPolicyPropertiesPriorityReplication
    :ivar tags_replication: Optional. The object replication policy tags replication feature
     options.
    :vartype tags_replication:
     ~azure.mgmt.storage.models.ObjectReplicationPolicyPropertiesTagsReplication
    """

    policy_id: Optional[str] = rest_field(name="policyId", visibility=["read"])
    """A unique id for object replication policy."""
    enabled_time: Optional[datetime.datetime] = rest_field(name="enabledTime", visibility=["read"], format="rfc3339")
    """Indicates when the policy is enabled on the source account."""
    source_account: str = rest_field(name="sourceAccount", visibility=["read", "create", "update", "delete", "query"])
    """Required. Source account name. It should be full resource id if allowCrossTenantReplication set
    to false. Required."""
    destination_account: str = rest_field(
        name="destinationAccount", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. Destination account name. It should be full resource id if
    allowCrossTenantReplication set to false. Required."""
    rules: Optional[list["_models.ObjectReplicationPolicyRule"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The storage account object replication rules."""
    metrics: Optional["_models.ObjectReplicationPolicyPropertiesMetrics"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Optional. The object replication policy metrics feature options."""
    priority_replication: Optional["_models.ObjectReplicationPolicyPropertiesPriorityReplication"] = rest_field(
        name="priorityReplication", visibility=["read", "create", "update", "delete", "query"]
    )
    """Optional. The object replication policy priority replication feature options."""
    tags_replication: Optional["_models.ObjectReplicationPolicyPropertiesTagsReplication"] = rest_field(
        name="tagsReplication", visibility=["read", "create", "update", "delete", "query"]
    )
    """Optional. The object replication policy tags replication feature options."""

    @overload
    def __init__(
        self,
        *,
        source_account: str,
        destination_account: str,
        rules: Optional[list["_models.ObjectReplicationPolicyRule"]] = None,
        metrics: Optional["_models.ObjectReplicationPolicyPropertiesMetrics"] = None,
        priority_replication: Optional["_models.ObjectReplicationPolicyPropertiesPriorityReplication"] = None,
        tags_replication: Optional["_models.ObjectReplicationPolicyPropertiesTagsReplication"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ObjectReplicationPolicyPropertiesMetrics(_Model):
    """Optional. The object replication policy metrics feature options.

    :ivar enabled: Indicates whether object replication metrics feature is enabled for the policy.
    :vartype enabled: bool
    """

    enabled: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Indicates whether object replication metrics feature is enabled for the policy."""

    @overload
    def __init__(
        self,
        *,
        enabled: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ObjectReplicationPolicyPropertiesPriorityReplication(_Model):  # pylint: disable=name-too-long
    """Optional. The object replication policy priority replication feature options.

    :ivar enabled: Indicates whether object replication priority replication feature is enabled for
     the policy.
    :vartype enabled: bool
    """

    enabled: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Indicates whether object replication priority replication feature is enabled for the policy."""

    @overload
    def __init__(
        self,
        *,
        enabled: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ObjectReplicationPolicyPropertiesTagsReplication(_Model):  # pylint: disable=name-too-long
    """Optional. The object replication policy tags replication feature options.

    :ivar enabled: Indicates whether object replication tags replication feature is enabled for the
     policy.
    :vartype enabled: bool
    """

    enabled: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Indicates whether object replication tags replication feature is enabled for the policy."""

    @overload
    def __init__(
        self,
        *,
        enabled: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ObjectReplicationPolicyRule(_Model):
    """The replication policy rule between two containers.

    :ivar rule_id: Rule Id is auto-generated for each new rule on destination account. It is
     required for put policy on source account.
    :vartype rule_id: str
    :ivar source_container: Required. Source container name. Required.
    :vartype source_container: str
    :ivar destination_container: Required. Destination container name. Required.
    :vartype destination_container: str
    :ivar filters: Optional. An object that defines the filter set.
    :vartype filters: ~azure.mgmt.storage.models.ObjectReplicationPolicyFilter
    """

    rule_id: Optional[str] = rest_field(name="ruleId", visibility=["read", "create", "update", "delete", "query"])
    """Rule Id is auto-generated for each new rule on destination account. It is required for put
    policy on source account."""
    source_container: str = rest_field(
        name="sourceContainer", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. Source container name. Required."""
    destination_container: str = rest_field(
        name="destinationContainer", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. Destination container name. Required."""
    filters: Optional["_models.ObjectReplicationPolicyFilter"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Optional. An object that defines the filter set."""

    @overload
    def __init__(
        self,
        *,
        source_container: str,
        destination_container: str,
        rule_id: Optional[str] = None,
        filters: Optional["_models.ObjectReplicationPolicyFilter"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class Operation(_Model):
    """Storage REST API operation definition.

    :ivar name: Operation name: {provider}/{resource}/{operation}.
    :vartype name: str
    :ivar display: Display metadata associated with the operation.
    :vartype display: ~azure.mgmt.storage.models.OperationDisplay
    :ivar origin: The origin of operations.
    :vartype origin: str
    :ivar operation_properties: Properties of operation, include metric specifications.
    :vartype operation_properties: ~azure.mgmt.storage.models.OperationProperties
    """

    name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Operation name: {provider}/{resource}/{operation}."""
    display: Optional["_models.OperationDisplay"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Display metadata associated with the operation."""
    origin: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The origin of operations."""
    operation_properties: Optional["_models.OperationProperties"] = rest_field(
        name="properties", visibility=["read", "create", "update", "delete", "query"]
    )
    """Properties of operation, include metric specifications."""

    # Property names forwarded to/from `operation_properties` for
    # backward-compatible flattened attribute access.
    __flattened_items = ["service_specification"]

    @overload
    def __init__(
        self,
        *,
        name: Optional[str] = None,
        display: Optional["_models.OperationDisplay"] = None,
        origin: Optional[str] = None,
        operation_properties: Optional["_models.OperationProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Pull flattened keyword args out before the base init, then assign
        # them so they land on the nested model via __setattr__.
        _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}
        super().__init__(*args, **kwargs)
        for k, v in _flattened_input.items():
            setattr(self, k, v)

    def __getattr__(self, name: str) -> Any:
        if name in self.__flattened_items:
            if self.operation_properties is None:
                return None
            return getattr(self.operation_properties, name)
        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")

    def __setattr__(self, key: str, value: Any) -> None:
        if key in self.__flattened_items:
            if self.operation_properties is None:
                self.operation_properties = self._attr_to_rest_field["operation_properties"]._class_type()
            setattr(self.operation_properties, key, value)
        else:
            super().__setattr__(key, value)


class OperationDisplay(_Model):
    """Display metadata associated with the operation.

    :ivar provider: Service provider: Microsoft Storage.
    :vartype provider: str
    :ivar resource: Resource on which the operation is performed etc.
    :vartype resource: str
    :ivar operation: Type of operation: get, read, delete, etc.
    :vartype operation: str
    :ivar description: Description of the operation.
+ :vartype description: str + """ + + provider: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Service provider: Microsoft Storage.""" + resource: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Resource on which the operation is performed etc.""" + operation: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Type of operation: get, read, delete, etc.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description of the operation.""" + + @overload + def __init__( + self, + *, + provider: Optional[str] = None, + resource: Optional[str] = None, + operation: Optional[str] = None, + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OperationProperties(_Model): + """Properties of operation, include metric specifications. + + :ivar service_specification: One property of operation, include metric specifications. + :vartype service_specification: ~azure.mgmt.storage.models.ServiceSpecification + """ + + service_specification: Optional["_models.ServiceSpecification"] = rest_field( + name="serviceSpecification", visibility=["read", "create", "update", "delete", "query"] + ) + """One property of operation, include metric specifications.""" + + @overload + def __init__( + self, + *, + service_specification: Optional["_models.ServiceSpecification"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PermissionScope(_Model): + """PermissionScope. + + :ivar permissions: The permissions for the local user. Possible values include: Read (r), Write + (w), Delete (d), List (l), Create (c), Modify Ownership (o), and Modify Permissions (p). + Required. + :vartype permissions: str + :ivar service: The service used by the local user, e.g. blob, file. Required. + :vartype service: str + :ivar resource_name: The name of resource, normally the container name or the file share name, + used by the local user. Required. + :vartype resource_name: str + """ + + permissions: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The permissions for the local user. Possible values include: Read (r), Write (w), Delete (d), + List (l), Create (c), Modify Ownership (o), and Modify Permissions (p). Required.""" + service: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The service used by the local user, e.g. blob, file. Required.""" + resource_name: str = rest_field(name="resourceName", visibility=["read", "create", "update", "delete", "query"]) + """The name of resource, normally the container name or the file share name, used by the local + user. Required.""" + + @overload + def __init__( + self, + *, + permissions: str, + service: str, + resource_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Placement(_Model): + """The complex type of the zonal placement details. + + :ivar zone_placement_policy: The availability zone pinning policy for the storage account. + Known values are: "Any" and "None". 
+ :vartype zone_placement_policy: str or ~azure.mgmt.storage.models.ZonePlacementPolicy + """ + + zone_placement_policy: Optional[Union[str, "_models.ZonePlacementPolicy"]] = rest_field( + name="zonePlacementPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """The availability zone pinning policy for the storage account. Known values are: \"Any\" and + \"None\".""" + + @overload + def __init__( + self, + *, + zone_placement_policy: Optional[Union[str, "_models.ZonePlacementPolicy"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PrivateEndpoint(_Model): + """The Private Endpoint resource. + + :ivar id: The ARM identifier for Private Endpoint. + :vartype id: str + """ + + id: Optional[str] = rest_field(visibility=["read"]) + """The ARM identifier for Private Endpoint.""" + + +class PrivateEndpointConnection(ProxyResource): + """The Private Endpoint Connection resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar properties: Resource properties. 
+ :vartype properties: ~azure.mgmt.storage.models.PrivateEndpointConnectionProperties + """ + + properties: Optional["_models.PrivateEndpointConnectionProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Resource properties.""" + + __flattened_items = ["private_endpoint", "private_link_service_connection_state", "provisioning_state"] + + @overload + def __init__( + self, + *, + properties: Optional["_models.PrivateEndpointConnectionProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.properties is None: + return None + return getattr(self.properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.properties is None: + self.properties = self._attr_to_rest_field["properties"]._class_type() + setattr(self.properties, key, value) + else: + super().__setattr__(key, value) + + +class PrivateEndpointConnectionProperties(_Model): + """Properties of the PrivateEndpointConnectProperties. + + :ivar private_endpoint: The resource of private end point. + :vartype private_endpoint: ~azure.mgmt.storage.models.PrivateEndpoint + :ivar private_link_service_connection_state: A collection of information about the state of the + connection between service consumer and provider. Required. 
+ :vartype private_link_service_connection_state: + ~azure.mgmt.storage.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: The provisioning state of the private endpoint connection resource. + Known values are: "Succeeded", "Creating", "Deleting", and "Failed". + :vartype provisioning_state: str or + ~azure.mgmt.storage.models.PrivateEndpointConnectionProvisioningState + """ + + private_endpoint: Optional["_models.PrivateEndpoint"] = rest_field( + name="privateEndpoint", visibility=["read", "create", "update", "delete", "query"] + ) + """The resource of private end point.""" + private_link_service_connection_state: "_models.PrivateLinkServiceConnectionState" = rest_field( + name="privateLinkServiceConnectionState", visibility=["read", "create", "update", "delete", "query"] + ) + """A collection of information about the state of the connection between service consumer and + provider. Required.""" + provisioning_state: Optional[Union[str, "_models.PrivateEndpointConnectionProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The provisioning state of the private endpoint connection resource. Known values are: + \"Succeeded\", \"Creating\", \"Deleting\", and \"Failed\".""" + + @overload + def __init__( + self, + *, + private_link_service_connection_state: "_models.PrivateLinkServiceConnectionState", + private_endpoint: Optional["_models.PrivateEndpoint"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PrivateLinkResource(ResourceAutoGenerated): + """A private link resource. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar properties: Resource properties. + :vartype properties: ~azure.mgmt.storage.models.PrivateLinkResourceProperties + """ + + properties: Optional["_models.PrivateLinkResourceProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Resource properties.""" + + __flattened_items = ["group_id", "required_members", "required_zone_names"] + + @overload + def __init__( + self, + *, + properties: Optional["_models.PrivateLinkResourceProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.properties is None: + return None + return getattr(self.properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.properties is None: + self.properties = self._attr_to_rest_field["properties"]._class_type() + setattr(self.properties, key, value) + else: + super().__setattr__(key, value) + + +class PrivateLinkResourceListResult(_Model): + """A list of private link resources. + + :ivar value: Array of private link resources. + :vartype value: list[~azure.mgmt.storage.models.PrivateLinkResource] + """ + + value: Optional[list["_models.PrivateLinkResource"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Array of private link resources.""" + + @overload + def __init__( + self, + *, + value: Optional[list["_models.PrivateLinkResource"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PrivateLinkResourceProperties(_Model): + """Properties of a private link resource. + + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar required_members: The private link resource required member names. + :vartype required_members: list[str] + :ivar required_zone_names: The private link resource Private link DNS zone name. 
+ :vartype required_zone_names: list[str] + """ + + group_id: Optional[str] = rest_field(name="groupId", visibility=["read"]) + """The private link resource group id.""" + required_members: Optional[list[str]] = rest_field(name="requiredMembers", visibility=["read"]) + """The private link resource required member names.""" + required_zone_names: Optional[list[str]] = rest_field( + name="requiredZoneNames", visibility=["read", "create", "update", "delete", "query"] + ) + """The private link resource Private link DNS zone name.""" + + @overload + def __init__( + self, + *, + required_zone_names: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PrivateLinkServiceConnectionState(_Model): + """A collection of information about the state of the connection between service consumer and + provider. + + :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. Known values are: "Pending", "Approved", and "Rejected". + :vartype status: str or ~azure.mgmt.storage.models.PrivateEndpointServiceConnectionStatus + :ivar description: The reason for approval/rejection of the connection. + :vartype description: str + :ivar action_required: A message indicating if changes on the service provider require any + updates on the consumer. + :vartype action_required: str + """ + + status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether the connection has been Approved/Rejected/Removed by the owner of the + service. 
Known values are: \"Pending\", \"Approved\", and \"Rejected\".""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The reason for approval/rejection of the connection.""" + action_required: Optional[str] = rest_field( + name="actionRequired", visibility=["read", "create", "update", "delete", "query"] + ) + """A message indicating if changes on the service provider require any updates on the consumer.""" + + @overload + def __init__( + self, + *, + status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None, + description: Optional[str] = None, + action_required: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ProtectedAppendWritesHistory(_Model): + """Protected append writes history setting for the blob container with Legal holds. + + :ivar allow_protected_append_writes_all: When enabled, new blocks can be written to both + 'Append and Block Blobs' while maintaining legal hold protection and compliance. Only new blocks + can be added and any existing blocks cannot be modified or deleted. + :vartype allow_protected_append_writes_all: bool + :ivar timestamp: Returns the date and time the tag was added. + :vartype timestamp: ~datetime.datetime + """ + + allow_protected_append_writes_all: Optional[bool] = rest_field( + name="allowProtectedAppendWritesAll", visibility=["read", "create", "update", "delete", "query"] + ) + """When enabled, new blocks can be written to both 'Append and Block Blobs' while maintaining legal + hold protection and compliance. 
Only new blocks can be added and any existing blocks cannot be + modified or deleted.""" + timestamp: Optional[datetime.datetime] = rest_field(visibility=["read"], format="rfc3339") + """Returns the date and time the tag was added.""" + + @overload + def __init__( + self, + *, + allow_protected_append_writes_all: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ProtocolSettings(_Model): + """Protocol settings for file service. + + :ivar smb: Setting for SMB protocol. + :vartype smb: ~azure.mgmt.storage.models.SmbSetting + :ivar nfs: Setting for NFS protocol. + :vartype nfs: ~azure.mgmt.storage.models.NfsSetting + """ + + smb: Optional["_models.SmbSetting"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Setting for SMB protocol.""" + nfs: Optional["_models.NfsSetting"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Setting for NFS protocol.""" + + @overload + def __init__( + self, + *, + smb: Optional["_models.SmbSetting"] = None, + nfs: Optional["_models.NfsSetting"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ProvisioningIssue(_Model): + """Describes provisioning issue for given NetworkSecurityPerimeterConfiguration. + + :ivar name: Name of the issue. + :vartype name: str + :ivar properties: Properties of provisioning issue. 
+ :vartype properties: ~azure.mgmt.storage.models.ProvisioningIssueProperties + """ + + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the issue.""" + properties: Optional["_models.ProvisioningIssueProperties"] = rest_field(visibility=["read"]) + """Properties of provisioning issue.""" + + @overload + def __init__( + self, + *, + name: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ProvisioningIssueProperties(_Model): + """Properties of provisioning issue. + + :ivar issue_type: Type of issue. Known values are: "Unknown" and + "ConfigurationPropagationFailure". + :vartype issue_type: str or ~azure.mgmt.storage.models.IssueType + :ivar severity: Severity of the issue. Known values are: "Warning" and "Error". + :vartype severity: str or ~azure.mgmt.storage.models.Severity + :ivar description: Description of the issue. + :vartype description: str + """ + + issue_type: Optional[Union[str, "_models.IssueType"]] = rest_field( + name="issueType", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of issue. Known values are: \"Unknown\" and \"ConfigurationPropagationFailure\".""" + severity: Optional[Union[str, "_models.Severity"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Severity of the issue. 
Known values are: \"Warning\" and \"Error\".""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description of the issue.""" + + @overload + def __init__( + self, + *, + issue_type: Optional[Union[str, "_models.IssueType"]] = None, + severity: Optional[Union[str, "_models.Severity"]] = None, + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class QueueProperties(_Model): + """QueueProperties. + + :ivar metadata: A name-value pair that represents queue metadata. + :vartype metadata: dict[str, str] + :ivar approximate_message_count: Integer indicating an approximate number of messages in the + queue. This number is not lower than the actual number of messages in the queue, but could be + higher. + :vartype approximate_message_count: int + """ + + metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A name-value pair that represents queue metadata.""" + approximate_message_count: Optional[int] = rest_field(name="approximateMessageCount", visibility=["read"]) + """Integer indicating an approximate number of messages in the queue. This number is not lower + than the actual number of messages in the queue, but could be higher.""" + + @overload + def __init__( + self, + *, + metadata: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class QueueServiceProperties(ProxyResource): + """The properties of a storage account’s Queue service. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar queue_service_properties: The properties of a storage account’s Queue service. + :vartype queue_service_properties: ~azure.mgmt.storage.models.QueueServicePropertiesProperties + """ + + queue_service_properties: Optional["_models.QueueServicePropertiesProperties"] = rest_field( + name="properties", visibility=["read", "create", "update", "delete", "query"] + ) + """The properties of a storage account’s Queue service.""" + + __flattened_items = ["cors"] + + @overload + def __init__( + self, + *, + queue_service_properties: Optional["_models.QueueServicePropertiesProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.queue_service_properties is None: + return None + return getattr(self.queue_service_properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.queue_service_properties is None: + self.queue_service_properties = self._attr_to_rest_field["queue_service_properties"]._class_type() + setattr(self.queue_service_properties, key, value) + else: + super().__setattr__(key, value) + + +class QueueServicePropertiesProperties(_Model): + """The properties of a storage account’s Queue service. + + :ivar cors: Specifies CORS rules for the Queue service. You can include up to five CorsRule + elements in the request. If no CorsRule elements are included in the request body, all CORS + rules will be deleted, and CORS will be disabled for the Queue service. + :vartype cors: ~azure.mgmt.storage.models.CorsRules + """ + + cors: Optional["_models.CorsRules"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Specifies CORS rules for the Queue service. You can include up to five CorsRule elements in the + request. If no CorsRule elements are included in the request body, all CORS rules will be + deleted, and CORS will be disabled for the Queue service.""" + + @overload + def __init__( + self, + *, + cors: Optional["_models.CorsRules"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResourceAccessRule(_Model): + """Resource Access Rule. + + :ivar tenant_id: Tenant Id. + :vartype tenant_id: str + :ivar resource_id: Resource Id. + :vartype resource_id: str + """ + + tenant_id: Optional[str] = rest_field(name="tenantId", visibility=["read", "create", "update", "delete", "query"]) + """Tenant Id.""" + resource_id: Optional[str] = rest_field( + name="resourceId", visibility=["read", "create", "update", "delete", "query"] + ) + """Resource Id.""" + + @overload + def __init__( + self, + *, + tenant_id: Optional[str] = None, + resource_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class RestorePolicyProperties(_Model): + """The blob service properties for blob restore policy. + + :ivar enabled: Blob restore is enabled if set to true. Required. + :vartype enabled: bool + :ivar days: how long this blob can be restored. It should be greater than zero and less than + DeleteRetentionPolicy.days. + :vartype days: int + :ivar last_enabled_time: Deprecated in favor of minRestoreTime property. + :vartype last_enabled_time: ~datetime.datetime + :ivar min_restore_time: Returns the minimum date and time that the restore can be started. + :vartype min_restore_time: ~datetime.datetime + """ + + enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Blob restore is enabled if set to true. Required.""" + days: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """how long this blob can be restored. 
It should be greater than zero and less than + DeleteRetentionPolicy.days.""" + last_enabled_time: Optional[datetime.datetime] = rest_field( + name="lastEnabledTime", visibility=["read"], format="rfc3339" + ) + """Deprecated in favor of minRestoreTime property.""" + min_restore_time: Optional[datetime.datetime] = rest_field( + name="minRestoreTime", visibility=["read"], format="rfc3339" + ) + """Returns the minimum date and time that the restore can be started.""" + + @overload + def __init__( + self, + *, + enabled: bool, + days: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Restriction(_Model): + """The restriction because of which SKU cannot be used. + + :ivar type: The type of restrictions. As of now only possible value for this is location. + :vartype type: str + :ivar values_property: The value of restrictions. If the restriction type is set to location. + This would be different locations where the SKU is restricted. + :vartype values_property: list[str] + :ivar reason_code: The reason for the restriction. As of now this can be "QuotaId" or + "NotAvailableForSubscription". Quota Id is set when the SKU has requiredQuotas parameter as the + subscription does not belong to that quota. The "NotAvailableForSubscription" is related to + capacity at DC. Known values are: "QuotaId" and "NotAvailableForSubscription". + :vartype reason_code: str or ~azure.mgmt.storage.models.ReasonCode + """ + + type: Optional[str] = rest_field(visibility=["read"]) + """The type of restrictions. As of now only possible value for this is location.""" + values_property: Optional[list[str]] = rest_field(name="values", visibility=["read"], original_tsp_name="values") + """The value of restrictions. 
If the restriction type is set to location. This would be different + locations where the SKU is restricted.""" + reason_code: Optional[Union[str, "_models.ReasonCode"]] = rest_field( + name="reasonCode", visibility=["read", "create", "update", "delete", "query"] + ) + """The reason for the restriction. As of now this can be \"QuotaId\" or + \"NotAvailableForSubscription\". Quota Id is set when the SKU has requiredQuotas parameter as + the subscription does not belong to that quota. The \"NotAvailableForSubscription\" is related + to capacity at DC. Known values are: \"QuotaId\" and \"NotAvailableForSubscription\".""" + + @overload + def __init__( + self, + *, + reason_code: Optional[Union[str, "_models.ReasonCode"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class RoutingPreference(_Model): + """Routing preference defines the type of network, either microsoft or internet routing to be used + to deliver the user data, the default option is microsoft routing. + + :ivar routing_choice: Routing Choice defines the kind of network routing opted by the user. + Known values are: "MicrosoftRouting" and "InternetRouting". + :vartype routing_choice: str or ~azure.mgmt.storage.models.RoutingChoice + :ivar publish_microsoft_endpoints: A boolean flag which indicates whether microsoft routing + storage endpoints are to be published. + :vartype publish_microsoft_endpoints: bool + :ivar publish_internet_endpoints: A boolean flag which indicates whether internet routing + storage endpoints are to be published. 
+ :vartype publish_internet_endpoints: bool + """ + + routing_choice: Optional[Union[str, "_models.RoutingChoice"]] = rest_field( + name="routingChoice", visibility=["read", "create", "update", "delete", "query"] + ) + """Routing Choice defines the kind of network routing opted by the user. Known values are: + \"MicrosoftRouting\" and \"InternetRouting\".""" + publish_microsoft_endpoints: Optional[bool] = rest_field( + name="publishMicrosoftEndpoints", visibility=["read", "create", "update", "delete", "query"] + ) + """A boolean flag which indicates whether microsoft routing storage endpoints are to be published.""" + publish_internet_endpoints: Optional[bool] = rest_field( + name="publishInternetEndpoints", visibility=["read", "create", "update", "delete", "query"] + ) + """A boolean flag which indicates whether internet routing storage endpoints are to be published.""" + + @overload + def __init__( + self, + *, + routing_choice: Optional[Union[str, "_models.RoutingChoice"]] = None, + publish_microsoft_endpoints: Optional[bool] = None, + publish_internet_endpoints: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SasPolicy(_Model): + """SasPolicy assigned to the storage account. + + :ivar sas_expiration_period: The SAS expiration period, DD.HH:MM:SS. Required. + :vartype sas_expiration_period: str + :ivar expiration_action: The SAS Expiration Action defines the action to be performed when + sasPolicy.sasExpirationPeriod is violated. The 'Log' action can be used for audit purposes and + the 'Block' action can be used to block and deny the usage of SAS tokens that do not adhere to + the sas policy expiration period. Required. Known values are: "Log" and "Block". 
+ :vartype expiration_action: str or ~azure.mgmt.storage.models.ExpirationAction + """ + + sas_expiration_period: str = rest_field( + name="sasExpirationPeriod", visibility=["read", "create", "update", "delete", "query"] + ) + """The SAS expiration period, DD.HH:MM:SS. Required.""" + expiration_action: Union[str, "_models.ExpirationAction"] = rest_field( + name="expirationAction", visibility=["read", "create", "update", "delete", "query"] + ) + """The SAS Expiration Action defines the action to be performed when sasPolicy.sasExpirationPeriod + is violated. The 'Log' action can be used for audit purposes and the 'Block' action can be used + to block and deny the usage of SAS tokens that do not adhere to the sas policy expiration + period. Required. Known values are: \"Log\" and \"Block\".""" + + @overload + def __init__( + self, + *, + sas_expiration_period: str, + expiration_action: Union[str, "_models.ExpirationAction"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ServiceSasParameters(_Model): + """The parameters to list service SAS credentials of a specific resource. + + :ivar canonicalized_resource: The canonical path to the signed resource. Required. + :vartype canonicalized_resource: str + :ivar resource: The signed services accessible with the service SAS. Possible values include: + Blob (b), Container (c), File (f), Share (s). Known values are: "b", "c", "f", and "s". + :vartype resource: str or ~azure.mgmt.storage.models.SignedResource + :ivar permissions: The signed permissions for the service SAS. Possible values include: Read + (r), Write (w), Delete (d), List (l), Add (a), Create (c), Update (u) and Process (p). Known + values are: "r", "d", "w", "l", "a", "c", "u", and "p". 
+ :vartype permissions: str or ~azure.mgmt.storage.models.Permissions + :ivar ip_address_or_range: An IP address or a range of IP addresses from which to accept + requests. + :vartype ip_address_or_range: str + :ivar protocols: The protocol permitted for a request made with the account SAS. Known values + are: "https,http" and "https". + :vartype protocols: str or ~azure.mgmt.storage.models.HttpProtocol + :ivar shared_access_start_time: The time at which the SAS becomes valid. + :vartype shared_access_start_time: ~datetime.datetime + :ivar shared_access_expiry_time: The time at which the shared access signature becomes invalid. + :vartype shared_access_expiry_time: ~datetime.datetime + :ivar identifier: A unique value up to 64 characters in length that correlates to an access + policy specified for the container, queue, or table. + :vartype identifier: str + :ivar partition_key_start: The start of partition key. + :vartype partition_key_start: str + :ivar partition_key_end: The end of partition key. + :vartype partition_key_end: str + :ivar row_key_start: The start of row key. + :vartype row_key_start: str + :ivar row_key_end: The end of row key. + :vartype row_key_end: str + :ivar key_to_sign: The key to sign the account SAS token with. + :vartype key_to_sign: str + :ivar cache_control: The response header override for cache control. + :vartype cache_control: str + :ivar content_disposition: The response header override for content disposition. + :vartype content_disposition: str + :ivar content_encoding: The response header override for content encoding. + :vartype content_encoding: str + :ivar content_language: The response header override for content language. + :vartype content_language: str + :ivar content_type: The response header override for content type. 
+ :vartype content_type: str + """ + + canonicalized_resource: str = rest_field( + name="canonicalizedResource", visibility=["read", "create", "update", "delete", "query"] + ) + """The canonical path to the signed resource. Required.""" + resource: Optional[Union[str, "_models.SignedResource"]] = rest_field( + name="signedResource", visibility=["read", "create", "update", "delete", "query"] + ) + """The signed services accessible with the service SAS. Possible values include: Blob (b), + Container (c), File (f), Share (s). Known values are: \"b\", \"c\", \"f\", and \"s\".""" + permissions: Optional[Union[str, "_models.Permissions"]] = rest_field( + name="signedPermission", visibility=["read", "create", "update", "delete", "query"] + ) + """The signed permissions for the service SAS. Possible values include: Read (r), Write (w), + Delete (d), List (l), Add (a), Create (c), Update (u) and Process (p). Known values are: \"r\", + \"d\", \"w\", \"l\", \"a\", \"c\", \"u\", and \"p\".""" + ip_address_or_range: Optional[str] = rest_field( + name="signedIp", visibility=["read", "create", "update", "delete", "query"] + ) + """An IP address or a range of IP addresses from which to accept requests.""" + protocols: Optional[Union[str, "_models.HttpProtocol"]] = rest_field( + name="signedProtocol", visibility=["read", "create", "update", "delete", "query"] + ) + """The protocol permitted for a request made with the account SAS. 
Known values are: + \"https,http\" and \"https\".""" + shared_access_start_time: Optional[datetime.datetime] = rest_field( + name="signedStart", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The time at which the SAS becomes valid.""" + shared_access_expiry_time: Optional[datetime.datetime] = rest_field( + name="signedExpiry", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The time at which the shared access signature becomes invalid.""" + identifier: Optional[str] = rest_field( + name="signedIdentifier", visibility=["read", "create", "update", "delete", "query"] + ) + """A unique value up to 64 characters in length that correlates to an access policy specified for + the container, queue, or table.""" + partition_key_start: Optional[str] = rest_field( + name="startPk", visibility=["read", "create", "update", "delete", "query"] + ) + """The start of partition key.""" + partition_key_end: Optional[str] = rest_field( + name="endPk", visibility=["read", "create", "update", "delete", "query"] + ) + """The end of partition key.""" + row_key_start: Optional[str] = rest_field( + name="startRk", visibility=["read", "create", "update", "delete", "query"] + ) + """The start of row key.""" + row_key_end: Optional[str] = rest_field(name="endRk", visibility=["read", "create", "update", "delete", "query"]) + """The end of row key.""" + key_to_sign: Optional[str] = rest_field( + name="keyToSign", visibility=["read", "create", "update", "delete", "query"] + ) + """The key to sign the account SAS token with.""" + cache_control: Optional[str] = rest_field(name="rscc", visibility=["read", "create", "update", "delete", "query"]) + """The response header override for cache control.""" + content_disposition: Optional[str] = rest_field( + name="rscd", visibility=["read", "create", "update", "delete", "query"] + ) + """The response header override for content disposition.""" + content_encoding: Optional[str] = 
rest_field( + name="rsce", visibility=["read", "create", "update", "delete", "query"] + ) + """The response header override for content encoding.""" + content_language: Optional[str] = rest_field( + name="rscl", visibility=["read", "create", "update", "delete", "query"] + ) + """The response header override for content language.""" + content_type: Optional[str] = rest_field(name="rsct", visibility=["read", "create", "update", "delete", "query"]) + """The response header override for content type.""" + + @overload + def __init__( + self, + *, + canonicalized_resource: str, + resource: Optional[Union[str, "_models.SignedResource"]] = None, + permissions: Optional[Union[str, "_models.Permissions"]] = None, + ip_address_or_range: Optional[str] = None, + protocols: Optional[Union[str, "_models.HttpProtocol"]] = None, + shared_access_start_time: Optional[datetime.datetime] = None, + shared_access_expiry_time: Optional[datetime.datetime] = None, + identifier: Optional[str] = None, + partition_key_start: Optional[str] = None, + partition_key_end: Optional[str] = None, + row_key_start: Optional[str] = None, + row_key_end: Optional[str] = None, + key_to_sign: Optional[str] = None, + cache_control: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ServiceSharedKeyAccessProperties(_Model): + """Defines shared key access settings for an individual storage service. + + :ivar enabled: Indicates whether shared key access is enabled for the service. 
+ :vartype enabled: bool + """ + + enabled: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates whether shared key access is enabled for the service.""" + + @overload + def __init__( + self, + *, + enabled: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ServiceSpecification(_Model): + """One property of operation, include metric specifications. + + :ivar metric_specifications: Metric specifications of operation. + :vartype metric_specifications: list[~azure.mgmt.storage.models.MetricSpecification] + """ + + metric_specifications: Optional[list["_models.MetricSpecification"]] = rest_field( + name="metricSpecifications", visibility=["read", "create", "update", "delete", "query"] + ) + """Metric specifications of operation.""" + + @overload + def __init__( + self, + *, + metric_specifications: Optional[list["_models.MetricSpecification"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SignedIdentifier(_Model): + """SignedIdentifier. + + :ivar id: An unique identifier of the stored access policy. + :vartype id: str + :ivar access_policy: Access policy. 
+ :vartype access_policy: ~azure.mgmt.storage.models.AccessPolicy + """ + + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An unique identifier of the stored access policy.""" + access_policy: Optional["_models.AccessPolicy"] = rest_field( + name="accessPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """Access policy.""" + + @overload + def __init__( + self, + *, + id: Optional[str] = None, # pylint: disable=redefined-builtin + access_policy: Optional["_models.AccessPolicy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Sku(_Model): + """The SKU of the storage account. + + :ivar name: The SKU name. Required for account creation; optional for update. Note that in + older versions, SKU name was called accountType. Required. Known values are: "Standard_LRS", + "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", + "Standard_GZRS", "Standard_RAGZRS", "StandardV2_LRS", "StandardV2_GRS", "StandardV2_ZRS", + "StandardV2_GZRS", "PremiumV2_LRS", and "PremiumV2_ZRS". + :vartype name: str or ~azure.mgmt.storage.models.SkuName + :ivar tier: The SKU tier. This is based on the SKU name. Known values are: "Standard" and + "Premium". + :vartype tier: str or ~azure.mgmt.storage.models.SkuTier + """ + + name: Union[str, "_models.SkuName"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The SKU name. Required for account creation; optional for update. Note that in older versions, + SKU name was called accountType. Required. 
Known values are: \"Standard_LRS\", + \"Standard_GRS\", \"Standard_RAGRS\", \"Standard_ZRS\", \"Premium_LRS\", \"Premium_ZRS\", + \"Standard_GZRS\", \"Standard_RAGZRS\", \"StandardV2_LRS\", \"StandardV2_GRS\", + \"StandardV2_ZRS\", \"StandardV2_GZRS\", \"PremiumV2_LRS\", and \"PremiumV2_ZRS\".""" + tier: Optional[Union[str, "_models.SkuTier"]] = rest_field(visibility=["read"]) + """The SKU tier. This is based on the SKU name. Known values are: \"Standard\" and \"Premium\".""" + + @overload + def __init__( + self, + *, + name: Union[str, "_models.SkuName"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SKUCapability(_Model): + """The capability information in the specified SKU, including file encryption, network ACLs, + change notification, etc. + + :ivar name: The name of capability, The capability information in the specified SKU, including + file encryption, network ACLs, change notification, etc. + :vartype name: str + :ivar value: A string value to indicate states of given capability. Possibly 'true' or 'false'. + :vartype value: str + """ + + name: Optional[str] = rest_field(visibility=["read"]) + """The name of capability, The capability information in the specified SKU, including file + encryption, network ACLs, change notification, etc.""" + value: Optional[str] = rest_field(visibility=["read"]) + """A string value to indicate states of given capability. Possibly 'true' or 'false'.""" + + +class SkuInformation(_Model): + """Storage SKU and its properties. + + :ivar name: The SKU name. Required for account creation; optional for update. Note that in + older versions, SKU name was called accountType. Required. 
Known values are: "Standard_LRS", + "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", + "Standard_GZRS", "Standard_RAGZRS", "StandardV2_LRS", "StandardV2_GRS", "StandardV2_ZRS", + "StandardV2_GZRS", "PremiumV2_LRS", and "PremiumV2_ZRS". + :vartype name: str or ~azure.mgmt.storage.models.SkuName + :ivar tier: The SKU tier. This is based on the SKU name. Known values are: "Standard" and + "Premium". + :vartype tier: str or ~azure.mgmt.storage.models.SkuTier + :ivar resource_type: The type of the resource, usually it is 'storageAccounts'. + :vartype resource_type: str + :ivar kind: Indicates the type of storage account. Known values are: "Storage", "StorageV2", + "BlobStorage", "FileStorage", and "BlockBlobStorage". + :vartype kind: str or ~azure.mgmt.storage.models.Kind + :ivar locations: The set of locations that the SKU is available. This will be supported and + registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). + :vartype locations: list[str] + :ivar location_info: + :vartype location_info: list[~azure.mgmt.storage.models.SkuInformationLocationInfoItem] + :ivar capabilities: The capability information in the specified SKU, including file encryption, + network ACLs, change notification, etc. + :vartype capabilities: list[~azure.mgmt.storage.models.SKUCapability] + :ivar restrictions: The restrictions because of which SKU cannot be used. This is empty if + there are no restrictions. + :vartype restrictions: list[~azure.mgmt.storage.models.Restriction] + """ + + name: Union[str, "_models.SkuName"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The SKU name. Required for account creation; optional for update. Note that in older versions, + SKU name was called accountType. Required. 
Known values are: \"Standard_LRS\", + \"Standard_GRS\", \"Standard_RAGRS\", \"Standard_ZRS\", \"Premium_LRS\", \"Premium_ZRS\", + \"Standard_GZRS\", \"Standard_RAGZRS\", \"StandardV2_LRS\", \"StandardV2_GRS\", + \"StandardV2_ZRS\", \"StandardV2_GZRS\", \"PremiumV2_LRS\", and \"PremiumV2_ZRS\".""" + tier: Optional[Union[str, "_models.SkuTier"]] = rest_field(visibility=["read"]) + """The SKU tier. This is based on the SKU name. Known values are: \"Standard\" and \"Premium\".""" + resource_type: Optional[str] = rest_field(name="resourceType", visibility=["read"]) + """The type of the resource, usually it is 'storageAccounts'.""" + kind: Optional[Union[str, "_models.Kind"]] = rest_field(visibility=["read"]) + """Indicates the type of storage account. Known values are: \"Storage\", \"StorageV2\", + \"BlobStorage\", \"FileStorage\", and \"BlockBlobStorage\".""" + locations: Optional[list[str]] = rest_field(visibility=["read"]) + """The set of locations that the SKU is available. This will be supported and registered Azure Geo + Regions (e.g. West US, East US, Southeast Asia, etc.).""" + location_info: Optional[list["_models.SkuInformationLocationInfoItem"]] = rest_field( + name="locationInfo", visibility=["read", "create", "update", "delete", "query"] + ) + capabilities: Optional[list["_models.SKUCapability"]] = rest_field(visibility=["read"]) + """The capability information in the specified SKU, including file encryption, network ACLs, + change notification, etc.""" + restrictions: Optional[list["_models.Restriction"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The restrictions because of which SKU cannot be used. This is empty if there are no + restrictions.""" + + @overload + def __init__( + self, + *, + name: Union[str, "_models.SkuName"], + location_info: Optional[list["_models.SkuInformationLocationInfoItem"]] = None, + restrictions: Optional[list["_models.Restriction"]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SkuInformationLocationInfoItem(_Model): + """SkuInformationLocationInfoItem. + + :ivar location: Describes the location for the product where storage account resource can be + created. + :vartype location: str + :ivar zones: Describes the available zones for the product where storage account resource can + be created. + :vartype zones: list[str] + """ + + location: Optional[str] = rest_field(visibility=["read"]) + """Describes the location for the product where storage account resource can be created.""" + zones: Optional[list[str]] = rest_field(visibility=["read"]) + """Describes the available zones for the product where storage account resource can be created.""" + + +class SmbOAuthSettings(_Model): + """Setting property for Managed Identity access over SMB using OAuth. + + :ivar is_smb_o_auth_enabled: Specifies if managed identities can access SMB shares using OAuth. + The default interpretation is false for this property. + :vartype is_smb_o_auth_enabled: bool + """ + + is_smb_o_auth_enabled: Optional[bool] = rest_field( + name="isSmbOAuthEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies if managed identities can access SMB shares using OAuth. The default interpretation + is false for this property.""" + + @overload + def __init__( + self, + *, + is_smb_o_auth_enabled: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SmbSetting(_Model): + """Setting for SMB protocol. 
+ + :ivar multichannel: Multichannel setting. Applies to Premium FileStorage only. + :vartype multichannel: ~azure.mgmt.storage.models.Multichannel + :ivar versions: SMB protocol versions supported by server. Valid values are SMB2.1, SMB3.0, + SMB3.1.1. Should be passed as a string with delimiter ';'. + :vartype versions: str + :ivar authentication_methods: SMB authentication methods supported by server. Valid values are + NTLMv2, Kerberos. Should be passed as a string with delimiter ';'. + :vartype authentication_methods: str + :ivar kerberos_ticket_encryption: Kerberos ticket encryption supported by server. Valid values + are RC4-HMAC, AES-256. Should be passed as a string with delimiter ';'. + :vartype kerberos_ticket_encryption: str + :ivar channel_encryption: SMB channel encryption supported by server. Valid values are + AES-128-CCM, AES-128-GCM, AES-256-GCM. Should be passed as a string with delimiter ';'. + :vartype channel_encryption: str + :ivar encryption_in_transit: Encryption in transit setting. + :vartype encryption_in_transit: ~azure.mgmt.storage.models.EncryptionInTransit + """ + + multichannel: Optional["_models.Multichannel"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Multichannel setting. Applies to Premium FileStorage only.""" + versions: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """SMB protocol versions supported by server. Valid values are SMB2.1, SMB3.0, SMB3.1.1. Should be + passed as a string with delimiter ';'.""" + authentication_methods: Optional[str] = rest_field( + name="authenticationMethods", visibility=["read", "create", "update", "delete", "query"] + ) + """SMB authentication methods supported by server. Valid values are NTLMv2, Kerberos. 
Should be + passed as a string with delimiter ';'.""" + kerberos_ticket_encryption: Optional[str] = rest_field( + name="kerberosTicketEncryption", visibility=["read", "create", "update", "delete", "query"] + ) + """Kerberos ticket encryption supported by server. Valid values are RC4-HMAC, AES-256. Should be + passed as a string with delimiter ';'.""" + channel_encryption: Optional[str] = rest_field( + name="channelEncryption", visibility=["read", "create", "update", "delete", "query"] + ) + """SMB channel encryption supported by server. Valid values are AES-128-CCM, AES-128-GCM, + AES-256-GCM. Should be passed as a string with delimiter ';'.""" + encryption_in_transit: Optional["_models.EncryptionInTransit"] = rest_field( + name="encryptionInTransit", visibility=["read", "create", "update", "delete", "query"] + ) + """Encryption in transit setting.""" + + @overload + def __init__( + self, + *, + multichannel: Optional["_models.Multichannel"] = None, + versions: Optional[str] = None, + authentication_methods: Optional[str] = None, + kerberos_ticket_encryption: Optional[str] = None, + channel_encryption: Optional[str] = None, + encryption_in_transit: Optional["_models.EncryptionInTransit"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SshPublicKey(_Model): + """SshPublicKey. + + :ivar description: Optional. It is used to store the function/usage of the key. + :vartype description: str + :ivar key: Ssh public key base64 encoded. The format should be: ' ', e.g. + ssh-rsa AAAABBBB. + :vartype key: str + """ + + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional. 
It is used to store the function/usage of the key.""" + key: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Ssh public key base64 encoded. The format should be: ' ', e.g. ssh-rsa + AAAABBBB.""" + + @overload + def __init__( + self, + *, + description: Optional[str] = None, + key: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StaticWebsite(_Model): + """The static website properties for blob storage. + + :ivar enabled: Indicates whether static website support is enabled for the specified account. + Required. + :vartype enabled: bool + :ivar index_document: The webpage that Azure Storage serves for requests to the root of a + website or any subfolder (for example, index.html). The value is case-sensitive. + :vartype index_document: str + :ivar default_index_document_path: The absolute path where the default index file is present. + This absolute path is mutually exclusive to "indexDocument" and it is case-sensitive. + :vartype default_index_document_path: str + :ivar error_document404_path: The absolute path to a webpage that Azure Storage serves for + requests that don't correspond to an existing file. The contents of the page are returned with + HTTP 404 Not Found. Only a single custom 404 page is supported in each static website. + :vartype error_document404_path: str + """ + + enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates whether static website support is enabled for the specified account. 
Required.""" + index_document: Optional[str] = rest_field( + name="indexDocument", visibility=["read", "create", "update", "delete", "query"] + ) + """The webpage that Azure Storage serves for requests to the root of a website or any subfolder + (for example, index.html). The value is case-sensitive.""" + default_index_document_path: Optional[str] = rest_field( + name="defaultIndexDocumentPath", visibility=["read", "create", "update", "delete", "query"] + ) + """The absolute path where the default index file is present. This absolute path is mutually + exclusive to \"indexDocument\" and it is case-sensitive.""" + error_document404_path: Optional[str] = rest_field( + name="errorDocument404Path", visibility=["read", "create", "update", "delete", "query"] + ) + """The absolute path to a webpage that Azure Storage serves for requests that don't correspond to + an existing file. The contents of the page are returned with HTTP 404 Not Found. Only a single + custom 404 page is supported in each static website.""" + + @overload + def __init__( + self, + *, + enabled: bool, + index_document: Optional[str] = None, + default_index_document_path: Optional[str] = None, + error_document404_path: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageAccount(TrackedResource): + """The storage account. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". 
+ :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: Properties of the storage account. + :vartype properties: ~azure.mgmt.storage.models.StorageAccountProperties + :ivar sku: Gets the SKU. + :vartype sku: ~azure.mgmt.storage.models.Sku + :ivar kind: Gets the Kind. Known values are: "Storage", "StorageV2", "BlobStorage", + "FileStorage", and "BlockBlobStorage". + :vartype kind: str or ~azure.mgmt.storage.models.Kind + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.storage.models.Identity + :ivar extended_location: The extendedLocation of the resource. + :vartype extended_location: ~azure.mgmt.storage.models.ExtendedLocation + :ivar zones: The availability zones. + :vartype zones: list[str] + :ivar placement: Optional. Gets or sets the zonal placement details for the storage account. + :vartype placement: ~azure.mgmt.storage.models.Placement + """ + + properties: Optional["_models.StorageAccountProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Properties of the storage account.""" + sku: Optional["_models.Sku"] = rest_field(visibility=["read"]) + """Gets the SKU.""" + kind: Optional[Union[str, "_models.Kind"]] = rest_field(visibility=["read"]) + """Gets the Kind. 
Known values are: \"Storage\", \"StorageV2\", \"BlobStorage\", \"FileStorage\", + and \"BlockBlobStorage\".""" + identity: Optional["_models.Identity"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The identity of the resource.""" + extended_location: Optional["_models.ExtendedLocation"] = rest_field( + name="extendedLocation", visibility=["read", "create", "update", "delete", "query"] + ) + """The extendedLocation of the resource.""" + zones: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The availability zones.""" + placement: Optional["_models.Placement"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional. Gets or sets the zonal placement details for the storage account.""" + + __flattened_items = [ + "provisioning_state", + "primary_endpoints", + "primary_location", + "status_of_primary", + "last_geo_failover_time", + "secondary_location", + "status_of_secondary", + "creation_time", + "custom_domain", + "sas_policy", + "key_policy", + "key_creation_time", + "secondary_endpoints", + "encryption", + "access_tier", + "azure_files_identity_based_authentication", + "enable_https_traffic_only", + "network_rule_set", + "is_sftp_enabled", + "is_local_user_enabled", + "enable_extended_groups", + "is_hns_enabled", + "geo_replication_stats", + "failover_in_progress", + "large_file_shares_state", + "private_endpoint_connections", + "routing_preference", + "dual_stack_endpoint_preference", + "blob_restore_status", + "allow_blob_public_access", + "minimum_tls_version", + "allow_shared_key_access", + "enable_nfs_v3", + "allow_cross_tenant_replication", + "default_to_o_auth_authentication", + "public_network_access", + "immutable_storage_with_versioning", + "allowed_copy_scope", + "storage_account_sku_conversion_status", + "dns_endpoint_type", + "is_sku_conversion_blocked", + "account_migration_in_progress", + "geo_priority_replication_status", + 
"allow_shared_key_access_for_services", + "data_collaboration_policy_properties", + ] + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.StorageAccountProperties"] = None, + identity: Optional["_models.Identity"] = None, + extended_location: Optional["_models.ExtendedLocation"] = None, + zones: Optional[list[str]] = None, + placement: Optional["_models.Placement"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.properties is None: + return None + return getattr(self.properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.properties is None: + self.properties = self._attr_to_rest_field["properties"]._class_type() + setattr(self.properties, key, value) + else: + super().__setattr__(key, value) + + +class StorageAccountCheckNameAvailabilityParameters(_Model): # pylint: disable=name-too-long + """The parameters used to check the availability of the storage account name. + + :ivar name: The storage account name. Required. + :vartype name: str + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Required. Default value is + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The storage account name. 
Required."""
    type: Literal["Microsoft.Storage/storageAccounts"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The type of resource, Microsoft.Storage/storageAccounts. Required. Default value is
    \"Microsoft.Storage/storageAccounts\"."""

    @overload
    def __init__(
        self,
        *,
        name: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # 'type' is a fixed constant for this request model, so it is pinned here
        # after base-class init rather than accepted as a constructor argument
        # (the keyword overload above deliberately exposes only 'name').
        self.type: Literal["Microsoft.Storage/storageAccounts"] = "Microsoft.Storage/storageAccounts"


class StorageAccountCreateParameters(_Model):
    """The parameters used when creating a storage account.

    :ivar sku: Required. Gets or sets the SKU name. Required.
    :vartype sku: ~azure.mgmt.storage.models.Sku
    :ivar kind: Required. Indicates the type of storage account. Required. Known values are:
     "Storage", "StorageV2", "BlobStorage", "FileStorage", and "BlockBlobStorage".
    :vartype kind: str or ~azure.mgmt.storage.models.Kind
    :ivar location: Required. Gets or sets the location of the resource. This will be one of the
     supported and registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). The
     geo region of a resource cannot be changed once it is created, but if an identical geo region
     is specified on update, the request will succeed. Required.
    :vartype location: str
    :ivar extended_location: Optional. Set the extended location of the resource. If not set, the
     storage account will be created in Azure main region. Otherwise it will be created in the
     specified extended location.
    :vartype extended_location: ~azure.mgmt.storage.models.ExtendedLocation
    :ivar zones: Optional. Gets or sets the pinned logical availability zone for the storage
     account.
    :vartype zones: list[str]
    :ivar placement: Optional. Gets or sets the zonal placement details for the storage account.
    :vartype placement: ~azure.mgmt.storage.models.Placement
    :ivar tags: Gets or sets a list of key value pairs that describe the resource. These tags can
     be used for viewing and grouping this resource (across resource groups). A maximum of 15 tags
     can be provided for a resource. Each tag must have a key with a length no greater than 128
     characters and a value with a length no greater than 256 characters.
    :vartype tags: dict[str, str]
    :ivar identity: The identity of the resource.
    :vartype identity: ~azure.mgmt.storage.models.Identity
    :ivar properties: The parameters used to create the storage account.
    :vartype properties: ~azure.mgmt.storage.models.StorageAccountPropertiesCreateParameters
    """

    sku: "_models.Sku" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required. Gets or sets the SKU name. Required."""
    kind: Union[str, "_models.Kind"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required. Indicates the type of storage account. Required. Known values are: \"Storage\",
    \"StorageV2\", \"BlobStorage\", \"FileStorage\", and \"BlockBlobStorage\"."""
    location: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required. Gets or sets the location of the resource. This will be one of the supported and
    registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). The geo region of a
    resource cannot be changed once it is created, but if an identical geo region is specified on
    update, the request will succeed. Required."""
    extended_location: Optional["_models.ExtendedLocation"] = rest_field(
        name="extendedLocation", visibility=["read", "create", "update", "delete", "query"]
    )
    """Optional. Set the extended location of the resource. If not set, the storage account will be
    created in Azure main region. Otherwise it will be created in the specified extended location."""
    zones: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional. Gets or sets the pinned logical availability zone for the storage account."""
    placement: Optional["_models.Placement"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional. Gets or sets the zonal placement details for the storage account."""
    tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Gets or sets a list of key value pairs that describe the resource. These tags can be used for
    viewing and grouping this resource (across resource groups). A maximum of 15 tags can be
    provided for a resource. Each tag must have a key with a length no greater than 128 characters
    and a value with a length no greater than 256 characters."""
    identity: Optional["_models.Identity"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The identity of the resource."""
    properties: Optional["_models.StorageAccountPropertiesCreateParameters"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The parameters used to create the storage account."""

    # Attribute names that live on the nested StorageAccountPropertiesCreateParameters
    # model but are exposed directly on this model ("client-side flattening") via the
    # __getattr__/__setattr__ hooks below.
    __flattened_items = [
        "allowed_copy_scope",
        "public_network_access",
        "sas_policy",
        "key_policy",
        "custom_domain",
        "encryption",
        "network_rule_set",
        "access_tier",
        "azure_files_identity_based_authentication",
        "enable_https_traffic_only",
        "is_sftp_enabled",
        "is_local_user_enabled",
        "enable_extended_groups",
        "is_hns_enabled",
        "large_file_shares_state",
        "routing_preference",
        "dual_stack_endpoint_preference",
        "allow_blob_public_access",
        "minimum_tls_version",
        "allow_shared_key_access",
        "enable_nfs_v3",
        "allow_cross_tenant_replication",
        "default_to_o_auth_authentication",
        "immutable_storage_with_versioning",
        "dns_endpoint_type",
        "geo_priority_replication_status",
        "allow_shared_key_access_for_services",
        "data_collaboration_policy_properties",
    ]

    @overload
    def __init__(
        self,
        *,
        sku: "_models.Sku",
        kind: Union[str, "_models.Kind"],
        location: str,
        extended_location: Optional["_models.ExtendedLocation"] = None,
        zones: Optional[list[str]] = None,
        placement: Optional["_models.Placement"] = None,
        tags: Optional[dict[str, str]] = None,
        identity: Optional["_models.Identity"] = None,
        properties: Optional["_models.StorageAccountPropertiesCreateParameters"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Peel off any flattened keyword arguments before base-class init, then
        # re-assign them through __setattr__ so they land on the nested
        # 'properties' model rather than on this instance.
        _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}
        super().__init__(*args, **kwargs)
        for k, v in _flattened_input.items():
            setattr(self, k, v)

    def __getattr__(self, name: str) -> Any:
        # Reads of flattened attributes are served from the nested 'properties'
        # model; None is returned when 'properties' has not been materialized.
        if name in self.__flattened_items:
            if self.properties is None:
                return None
            return getattr(self.properties, name)
        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")

    def __setattr__(self, key: str, value: Any) -> None:
        # Writes to flattened attributes lazily materialize 'properties' first.
        if key in self.__flattened_items:
            if self.properties is None:
                self.properties = self._attr_to_rest_field["properties"]._class_type()
            setattr(self.properties, key, value)
        else:
            super().__setattr__(key, value)


class StorageAccountInternetEndpoints(_Model):
    """The URIs that are used to perform a retrieval of a public blob, file, web or dfs object via a
    internet routing endpoint.

    :ivar blob: Gets the blob endpoint.
    :vartype blob: str
    :ivar file: Gets the file endpoint.
    :vartype file: str
    :ivar web: Gets the web endpoint.
    :vartype web: str
    :ivar dfs: Gets the dfs endpoint.
:vartype dfs: str
    """

    blob: Optional[str] = rest_field(visibility=["read"])
    """Gets the blob endpoint."""
    file: Optional[str] = rest_field(visibility=["read"])
    """Gets the file endpoint."""
    web: Optional[str] = rest_field(visibility=["read"])
    """Gets the web endpoint."""
    dfs: Optional[str] = rest_field(visibility=["read"])
    """Gets the dfs endpoint."""


class StorageAccountIpv6Endpoints(_Model):
    """The URIs that are used to perform a retrieval of a public blob, queue, table, web or dfs object
    via an IPv6 endpoint.

    :ivar blob: Gets the blob endpoint.
    :vartype blob: str
    :ivar queue: Gets the queue endpoint.
    :vartype queue: str
    :ivar table: Gets the table endpoint.
    :vartype table: str
    :ivar file: Gets the file endpoint.
    :vartype file: str
    :ivar web: Gets the web endpoint.
    :vartype web: str
    :ivar dfs: Gets the dfs endpoint.
    :vartype dfs: str
    :ivar microsoft_endpoints: Gets the microsoft routing storage endpoints.
    :vartype microsoft_endpoints: ~azure.mgmt.storage.models.StorageAccountMicrosoftEndpoints
    :ivar internet_endpoints: Gets the internet routing storage endpoints.
    :vartype internet_endpoints: ~azure.mgmt.storage.models.StorageAccountInternetEndpoints
    """

    blob: Optional[str] = rest_field(visibility=["read"])
    """Gets the blob endpoint."""
    queue: Optional[str] = rest_field(visibility=["read"])
    """Gets the queue endpoint."""
    table: Optional[str] = rest_field(visibility=["read"])
    """Gets the table endpoint."""
    file: Optional[str] = rest_field(visibility=["read"])
    """Gets the file endpoint."""
    web: Optional[str] = rest_field(visibility=["read"])
    """Gets the web endpoint."""
    dfs: Optional[str] = rest_field(visibility=["read"])
    """Gets the dfs endpoint."""
    # NOTE(review): unlike the read-only endpoint fields above, these two carry
    # full CRUD/query visibility as emitted by the generator — confirm against the
    # TypeSpec whether they are really meant to be writable.
    microsoft_endpoints: Optional["_models.StorageAccountMicrosoftEndpoints"] = rest_field(
        name="microsoftEndpoints", visibility=["read", "create", "update", "delete", "query"]
    )
    """Gets the microsoft routing storage endpoints."""
    internet_endpoints: Optional["_models.StorageAccountInternetEndpoints"] = rest_field(
        name="internetEndpoints", visibility=["read", "create", "update", "delete", "query"]
    )
    """Gets the internet routing storage endpoints."""

    @overload
    def __init__(
        self,
        *,
        microsoft_endpoints: Optional["_models.StorageAccountMicrosoftEndpoints"] = None,
        internet_endpoints: Optional["_models.StorageAccountInternetEndpoints"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class StorageAccountKey(_Model):
    """An access key for the storage account.

    :ivar key_name: Name of the key.
    :vartype key_name: str
    :ivar value: Base 64-encoded value of the key.
    :vartype value: str
    :ivar permissions: Permissions for the key -- read-only or full permissions. Known values are:
     "Read" and "Full".
:vartype permissions: str or ~azure.mgmt.storage.models.KeyPermission
    :ivar creation_time: Creation time of the key, in round trip date format.
    :vartype creation_time: ~datetime.datetime
    """

    # All fields are server-populated secrets/metadata (visibility=["read"]) and
    # are never sent by the client.
    key_name: Optional[str] = rest_field(name="keyName", visibility=["read"])
    """Name of the key."""
    value: Optional[str] = rest_field(visibility=["read"])
    """Base 64-encoded value of the key."""
    permissions: Optional[Union[str, "_models.KeyPermission"]] = rest_field(visibility=["read"])
    """Permissions for the key -- read-only or full permissions. Known values are: \"Read\" and
    \"Full\"."""
    creation_time: Optional[datetime.datetime] = rest_field(name="creationTime", visibility=["read"], format="rfc3339")
    """Creation time of the key, in round trip date format."""


class StorageAccountListKeysResult(_Model):
    """The response from the ListKeys operation.

    :ivar keys_property: Gets the list of storage account keys and their properties for the
     specified storage account.
    :vartype keys_property: list[~azure.mgmt.storage.models.StorageAccountKey]
    """

    # Python attribute is 'keys_property' while the wire name stays 'keys' —
    # presumably renamed to avoid clashing with the Mapping-style 'keys' accessor
    # on the model base class; verify before "fixing" the name.
    keys_property: Optional[list["_models.StorageAccountKey"]] = rest_field(
        name="keys", visibility=["read"], original_tsp_name="keys"
    )
    """Gets the list of storage account keys and their properties for the specified storage account."""


class StorageAccountMicrosoftEndpoints(_Model):
    """The URIs that are used to perform a retrieval of a public blob, queue, table, web or dfs object
    via a microsoft routing endpoint.

    :ivar blob: Gets the blob endpoint.
    :vartype blob: str
    :ivar queue: Gets the queue endpoint.
    :vartype queue: str
    :ivar table: Gets the table endpoint.
    :vartype table: str
    :ivar file: Gets the file endpoint.
    :vartype file: str
    :ivar web: Gets the web endpoint.
    :vartype web: str
    :ivar dfs: Gets the dfs endpoint.
:vartype dfs: str
    """

    blob: Optional[str] = rest_field(visibility=["read"])
    """Gets the blob endpoint."""
    queue: Optional[str] = rest_field(visibility=["read"])
    """Gets the queue endpoint."""
    table: Optional[str] = rest_field(visibility=["read"])
    """Gets the table endpoint."""
    file: Optional[str] = rest_field(visibility=["read"])
    """Gets the file endpoint."""
    web: Optional[str] = rest_field(visibility=["read"])
    """Gets the web endpoint."""
    dfs: Optional[str] = rest_field(visibility=["read"])
    """Gets the dfs endpoint."""


class StorageAccountMigration(ProxyResource):
    """The parameters or status associated with an ongoing or enqueued storage account migration in
    order to update its current SKU or region.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.storage.models.SystemData
    :ivar storage_account_migration_details: The properties of a storage account’s ongoing or
     enqueued migration. Required.
    :vartype storage_account_migration_details:
     ~azure.mgmt.storage.models.StorageAccountMigrationProperties
    """

    # Wire name is the ARM-standard 'properties'; the Python name is more specific.
    storage_account_migration_details: "_models.StorageAccountMigrationProperties" = rest_field(
        name="properties", visibility=["read", "create", "update", "delete", "query"]
    )
    """The properties of a storage account’s ongoing or enqueued migration. Required."""

    # Attributes of StorageAccountMigrationProperties surfaced directly on this
    # model through the __getattr__/__setattr__ flattening hooks below.
    __flattened_items = [
        "target_sku_name",
        "migration_status",
        "migration_failed_reason",
        "migration_failed_detailed_reason",
    ]

    @overload
    def __init__(
        self,
        *,
        storage_account_migration_details: "_models.StorageAccountMigrationProperties",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Peel off flattened keyword arguments and re-assign them after base init
        # so they are routed into 'storage_account_migration_details'.
        _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}
        super().__init__(*args, **kwargs)
        for k, v in _flattened_input.items():
            setattr(self, k, v)

    def __getattr__(self, name: str) -> Any:
        # Delegate reads of flattened attributes to the nested details model;
        # None when the details model is unset.
        if name in self.__flattened_items:
            if self.storage_account_migration_details is None:
                return None
            return getattr(self.storage_account_migration_details, name)
        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")

    def __setattr__(self, key: str, value: Any) -> None:
        # Writes to flattened attributes lazily materialize the details model.
        if key in self.__flattened_items:
            if self.storage_account_migration_details is None:
                self.storage_account_migration_details = self._attr_to_rest_field[
                    "storage_account_migration_details"
                ]._class_type()
            setattr(self.storage_account_migration_details, key, value)
        else:
            super().__setattr__(key, value)


class StorageAccountMigrationProperties(_Model):
    """The properties of a storage account's ongoing or enqueued migration.

    :ivar target_sku_name: Target sku name for the account. Required. Known values are:
     "Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS",
     "Standard_GZRS", "Standard_RAGZRS", "StandardV2_LRS", "StandardV2_GRS", "StandardV2_ZRS",
     "StandardV2_GZRS", "PremiumV2_LRS", and "PremiumV2_ZRS".
    :vartype target_sku_name: str or ~azure.mgmt.storage.models.SkuName
    :ivar migration_status: Current status of migration.
Known values are: "Invalid",
     "SubmittedForConversion", "InProgress", "Complete", and "Failed".
    :vartype migration_status: str or ~azure.mgmt.storage.models.MigrationStatus
    :ivar migration_failed_reason: Error code for migration failure.
    :vartype migration_failed_reason: str
    :ivar migration_failed_detailed_reason: Reason for migration failure.
    :vartype migration_failed_detailed_reason: str
    """

    target_sku_name: Union[str, "_models.SkuName"] = rest_field(
        name="targetSkuName", visibility=["read", "create", "update", "delete", "query"]
    )
    """Target sku name for the account. Required. Known values are: \"Standard_LRS\",
    \"Standard_GRS\", \"Standard_RAGRS\", \"Standard_ZRS\", \"Premium_LRS\", \"Premium_ZRS\",
    \"Standard_GZRS\", \"Standard_RAGZRS\", \"StandardV2_LRS\", \"StandardV2_GRS\",
    \"StandardV2_ZRS\", \"StandardV2_GZRS\", \"PremiumV2_LRS\", and \"PremiumV2_ZRS\"."""
    # The remaining fields are server-populated (visibility=["read"]) and are
    # never sent by the client; only 'target_sku_name' is caller-settable.
    migration_status: Optional[Union[str, "_models.MigrationStatus"]] = rest_field(
        name="migrationStatus", visibility=["read"]
    )
    """Current status of migration. Known values are: \"Invalid\", \"SubmittedForConversion\",
    \"InProgress\", \"Complete\", and \"Failed\"."""
    migration_failed_reason: Optional[str] = rest_field(name="migrationFailedReason", visibility=["read"])
    """Error code for migration failure."""
    migration_failed_detailed_reason: Optional[str] = rest_field(
        name="migrationFailedDetailedReason", visibility=["read"]
    )
    """Reason for migration failure."""

    @overload
    def __init__(
        self,
        *,
        target_sku_name: Union[str, "_models.SkuName"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class StorageAccountProperties(_Model):
    """Properties of the storage account.
+ + :ivar provisioning_state: Gets the status of the storage account at the time the operation was + called. Known values are: "Creating", "ResolvingDNS", and "Succeeded". + :vartype provisioning_state: str or ~azure.mgmt.storage.models.ProvisioningState + :ivar primary_endpoints: Gets the URLs that are used to perform a retrieval of a public blob, + queue, or table object. Note that Standard_ZRS and Premium_LRS accounts only return the blob + endpoint. + :vartype primary_endpoints: ~azure.mgmt.storage.models.Endpoints + :ivar primary_location: Gets the location of the primary data center for the storage account. + :vartype primary_location: str + :ivar status_of_primary: Gets the status indicating whether the primary location of the storage + account is available or unavailable. Known values are: "available" and "unavailable". + :vartype status_of_primary: str or ~azure.mgmt.storage.models.AccountStatus + :ivar last_geo_failover_time: Gets the timestamp of the most recent instance of a failover to + the secondary location. Only the most recent timestamp is retained. This element is not + returned if there has never been a failover instance. Only available if the accountType is + Standard_GRS or Standard_RAGRS. + :vartype last_geo_failover_time: ~datetime.datetime + :ivar secondary_location: Gets the location of the geo-replicated secondary for the storage + account. Only available if the accountType is Standard_GRS or Standard_RAGRS. + :vartype secondary_location: str + :ivar status_of_secondary: Gets the status indicating whether the secondary location of the + storage account is available or unavailable. Only available if the SKU name is Standard_GRS or + Standard_RAGRS. Known values are: "available" and "unavailable". + :vartype status_of_secondary: str or ~azure.mgmt.storage.models.AccountStatus + :ivar creation_time: Gets the creation date and time of the storage account in UTC. 
+ :vartype creation_time: ~datetime.datetime + :ivar custom_domain: Gets the custom domain the user assigned to this storage account. + :vartype custom_domain: ~azure.mgmt.storage.models.CustomDomain + :ivar sas_policy: SasPolicy assigned to the storage account. + :vartype sas_policy: ~azure.mgmt.storage.models.SasPolicy + :ivar key_policy: KeyPolicy assigned to the storage account. + :vartype key_policy: ~azure.mgmt.storage.models.KeyPolicy + :ivar key_creation_time: Storage account keys creation time. + :vartype key_creation_time: ~azure.mgmt.storage.models.KeyCreationTime + :ivar secondary_endpoints: Gets the URLs that are used to perform a retrieval of a public blob, + queue, or table object from the secondary location of the storage account. Only available if + the SKU name is Standard_RAGRS. + :vartype secondary_endpoints: ~azure.mgmt.storage.models.Endpoints + :ivar encryption: Encryption settings to be used for server-side encryption for the storage + account. + :vartype encryption: ~azure.mgmt.storage.models.Encryption + :ivar access_tier: Required for storage accounts where kind = BlobStorage. The access tier is + used for billing. The 'Premium' access tier is the default value for premium block blobs + storage account type and it cannot be changed for the premium block blobs storage account type. + Known values are: "Hot", "Cool", "Premium", "Cold", and "Smart". + :vartype access_tier: str or ~azure.mgmt.storage.models.AccessTier + :ivar azure_files_identity_based_authentication: Provides the identity based authentication + settings for Azure Files. + :vartype azure_files_identity_based_authentication: + ~azure.mgmt.storage.models.AzureFilesIdentityBasedAuthentication + :ivar enable_https_traffic_only: Allows https traffic only to storage service if sets to true. + :vartype enable_https_traffic_only: bool + :ivar network_rule_set: Network rule set. 
+ :vartype network_rule_set: ~azure.mgmt.storage.models.NetworkRuleSet + :ivar is_sftp_enabled: Enables Secure File Transfer Protocol, if set to true. + :vartype is_sftp_enabled: bool + :ivar is_local_user_enabled: Enables local users feature, if set to true. + :vartype is_local_user_enabled: bool + :ivar enable_extended_groups: Enables extended group support with local users feature, if set + to true. + :vartype enable_extended_groups: bool + :ivar is_hns_enabled: Account HierarchicalNamespace enabled if sets to true. + :vartype is_hns_enabled: bool + :ivar geo_replication_stats: Geo Replication Stats. + :vartype geo_replication_stats: ~azure.mgmt.storage.models.GeoReplicationStats + :ivar failover_in_progress: If the failover is in progress, the value will be true, otherwise, + it will be null. + :vartype failover_in_progress: bool + :ivar large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be + disabled once it is enabled. Known values are: "Disabled" and "Enabled". + :vartype large_file_shares_state: str or ~azure.mgmt.storage.models.LargeFileSharesState + :ivar private_endpoint_connections: List of private endpoint connection associated with the + specified storage account. + :vartype private_endpoint_connections: + list[~azure.mgmt.storage.models.PrivateEndpointConnection] + :ivar routing_preference: Maintains information about the network routing choice opted by the + user for data transfer. + :vartype routing_preference: ~azure.mgmt.storage.models.RoutingPreference + :ivar dual_stack_endpoint_preference: Maintains information about the Internet protocol opted + by the user. + :vartype dual_stack_endpoint_preference: ~azure.mgmt.storage.models.DualStackEndpointPreference + :ivar blob_restore_status: Blob restore status. + :vartype blob_restore_status: ~azure.mgmt.storage.models.BlobRestoreStatus + :ivar allow_blob_public_access: Allow or disallow public access to all blobs or containers in + the storage account. 
The default interpretation is false for this property. + :vartype allow_blob_public_access: bool + :ivar minimum_tls_version: Set the minimum TLS version to be permitted on requests to storage. + The default interpretation is TLS 1.0 for this property. Minimum TLS version 1.3 version is not + supported. Known values are: "TLS1_0", "TLS1_1", "TLS1_2", and "TLS1_3". + :vartype minimum_tls_version: str or ~azure.mgmt.storage.models.MinimumTlsVersion + :ivar allow_shared_key_access: Indicates whether the storage account permits requests to be + authorized with the account access key via Shared Key. If false, then all requests, including + shared access signatures, must be authorized with Azure Active Directory (Azure AD). The + default value is null, which is equivalent to true. + :vartype allow_shared_key_access: bool + :ivar enable_nfs_v3: NFS 3.0 protocol support enabled if set to true. + :vartype enable_nfs_v3: bool + :ivar allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. + Set this property to true for new or existing accounts only if object replication policies will + involve storage accounts in different AAD tenants. The default interpretation is false for new + accounts to follow best security practices by default. + :vartype allow_cross_tenant_replication: bool + :ivar default_to_o_auth_authentication: A boolean flag which indicates whether the default + authentication is OAuth or not. The default interpretation is false for this property. + :vartype default_to_o_auth_authentication: bool + :ivar public_network_access: Allow, disallow, or let Network Security Perimeter configuration + to evaluate public network access to Storage Account. Known values are: "Enabled", "Disabled", + and "SecuredByPerimeter". + :vartype public_network_access: str or ~azure.mgmt.storage.models.PublicNetworkAccess + :ivar immutable_storage_with_versioning: The property is immutable and can only be set to true + at the account creation time. 
When set to true, it enables object level immutability for all + the containers in the account by default. + :vartype immutable_storage_with_versioning: ~azure.mgmt.storage.models.ImmutableStorageAccount + :ivar allowed_copy_scope: Restrict copy to and from Storage Accounts within an AAD tenant or + with Private Links to the same VNet. Known values are: "PrivateLink", "AAD", and "All". + :vartype allowed_copy_scope: str or ~azure.mgmt.storage.models.AllowedCopyScope + :ivar storage_account_sku_conversion_status: This property is readOnly and is set by server + during asynchronous storage account sku conversion operations. + :vartype storage_account_sku_conversion_status: + ~azure.mgmt.storage.models.StorageAccountSkuConversionStatus + :ivar dns_endpoint_type: Allows you to specify the type of endpoint. Set this to AzureDNSZone + to create a large number of accounts in a single subscription, which creates accounts in an + Azure DNS Zone and the endpoint URL will have an alphanumeric DNS Zone identifier. Known values + are: "Standard" and "AzureDnsZone". + :vartype dns_endpoint_type: str or ~azure.mgmt.storage.models.DnsEndpointType + :ivar is_sku_conversion_blocked: This property will be set to true or false on an event of + ongoing migration. Default value is null. + :vartype is_sku_conversion_blocked: bool + :ivar account_migration_in_progress: If customer initiated account migration is in progress, + the value will be true else it will be null. + :vartype account_migration_in_progress: bool + :ivar geo_priority_replication_status: Status indicating whether Geo Priority Replication is + enabled for the account. + :vartype geo_priority_replication_status: + ~azure.mgmt.storage.models.GeoPriorityReplicationStatus + :ivar allow_shared_key_access_for_services: Indicate shared key access properties at service + level. 
+ :vartype allow_shared_key_access_for_services: + ~azure.mgmt.storage.models.StorageAccountSharedKeyAccessProperties + :ivar data_collaboration_policy_properties: Data Collaboration policy for the storage account. + :vartype data_collaboration_policy_properties: + ~azure.mgmt.storage.models.StorageDataCollaborationPolicyProperties + """ + + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """Gets the status of the storage account at the time the operation was called. Known values are: + \"Creating\", \"ResolvingDNS\", and \"Succeeded\".""" + primary_endpoints: Optional["_models.Endpoints"] = rest_field(name="primaryEndpoints", visibility=["read"]) + """Gets the URLs that are used to perform a retrieval of a public blob, queue, or table object. + Note that Standard_ZRS and Premium_LRS accounts only return the blob endpoint.""" + primary_location: Optional[str] = rest_field(name="primaryLocation", visibility=["read"]) + """Gets the location of the primary data center for the storage account.""" + status_of_primary: Optional[Union[str, "_models.AccountStatus"]] = rest_field( + name="statusOfPrimary", visibility=["read"] + ) + """Gets the status indicating whether the primary location of the storage account is available or + unavailable. Known values are: \"available\" and \"unavailable\".""" + last_geo_failover_time: Optional[datetime.datetime] = rest_field( + name="lastGeoFailoverTime", visibility=["read"], format="rfc3339" + ) + """Gets the timestamp of the most recent instance of a failover to the secondary location. Only + the most recent timestamp is retained. This element is not returned if there has never been a + failover instance. 
Only available if the accountType is Standard_GRS or Standard_RAGRS.""" + secondary_location: Optional[str] = rest_field(name="secondaryLocation", visibility=["read"]) + """Gets the location of the geo-replicated secondary for the storage account. Only available if + the accountType is Standard_GRS or Standard_RAGRS.""" + status_of_secondary: Optional[Union[str, "_models.AccountStatus"]] = rest_field( + name="statusOfSecondary", visibility=["read"] + ) + """Gets the status indicating whether the secondary location of the storage account is available + or unavailable. Only available if the SKU name is Standard_GRS or Standard_RAGRS. Known values + are: \"available\" and \"unavailable\".""" + creation_time: Optional[datetime.datetime] = rest_field(name="creationTime", visibility=["read"], format="rfc3339") + """Gets the creation date and time of the storage account in UTC.""" + custom_domain: Optional["_models.CustomDomain"] = rest_field(name="customDomain", visibility=["read"]) + """Gets the custom domain the user assigned to this storage account.""" + sas_policy: Optional["_models.SasPolicy"] = rest_field(name="sasPolicy", visibility=["read"]) + """SasPolicy assigned to the storage account.""" + key_policy: Optional["_models.KeyPolicy"] = rest_field(name="keyPolicy", visibility=["read"]) + """KeyPolicy assigned to the storage account.""" + key_creation_time: Optional["_models.KeyCreationTime"] = rest_field(name="keyCreationTime", visibility=["read"]) + """Storage account keys creation time.""" + secondary_endpoints: Optional["_models.Endpoints"] = rest_field(name="secondaryEndpoints", visibility=["read"]) + """Gets the URLs that are used to perform a retrieval of a public blob, queue, or table object + from the secondary location of the storage account. 
Only available if the SKU name is + Standard_RAGRS.""" + encryption: Optional["_models.Encryption"] = rest_field(visibility=["read"]) + """Encryption settings to be used for server-side encryption for the storage account.""" + access_tier: Optional[Union[str, "_models.AccessTier"]] = rest_field(name="accessTier", visibility=["read"]) + """Required for storage accounts where kind = BlobStorage. The access tier is used for billing. + The 'Premium' access tier is the default value for premium block blobs storage account type and + it cannot be changed for the premium block blobs storage account type. Known values are: + \"Hot\", \"Cool\", \"Premium\", \"Cold\", and \"Smart\".""" + azure_files_identity_based_authentication: Optional["_models.AzureFilesIdentityBasedAuthentication"] = rest_field( + name="azureFilesIdentityBasedAuthentication", visibility=["read", "create", "update", "delete", "query"] + ) + """Provides the identity based authentication settings for Azure Files.""" + enable_https_traffic_only: Optional[bool] = rest_field( + name="supportsHttpsTrafficOnly", visibility=["read", "create", "update", "delete", "query"] + ) + """Allows https traffic only to storage service if sets to true.""" + network_rule_set: Optional["_models.NetworkRuleSet"] = rest_field(name="networkAcls", visibility=["read"]) + """Network rule set.""" + is_sftp_enabled: Optional[bool] = rest_field( + name="isSftpEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Enables Secure File Transfer Protocol, if set to true.""" + is_local_user_enabled: Optional[bool] = rest_field( + name="isLocalUserEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Enables local users feature, if set to true.""" + enable_extended_groups: Optional[bool] = rest_field( + name="enableExtendedGroups", visibility=["read", "create", "update", "delete", "query"] + ) + """Enables extended group support with local users feature, if set to true.""" + is_hns_enabled: 
Optional[bool] = rest_field( + name="isHnsEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Account HierarchicalNamespace enabled if sets to true.""" + geo_replication_stats: Optional["_models.GeoReplicationStats"] = rest_field( + name="geoReplicationStats", visibility=["read"] + ) + """Geo Replication Stats.""" + failover_in_progress: Optional[bool] = rest_field(name="failoverInProgress", visibility=["read"]) + """If the failover is in progress, the value will be true, otherwise, it will be null.""" + large_file_shares_state: Optional[Union[str, "_models.LargeFileSharesState"]] = rest_field( + name="largeFileSharesState", visibility=["read", "create", "update", "delete", "query"] + ) + """Allow large file shares if sets to Enabled. It cannot be disabled once it is enabled. Known + values are: \"Disabled\" and \"Enabled\".""" + private_endpoint_connections: Optional[list["_models.PrivateEndpointConnection"]] = rest_field( + name="privateEndpointConnections", visibility=["read"] + ) + """List of private endpoint connection associated with the specified storage account.""" + routing_preference: Optional["_models.RoutingPreference"] = rest_field( + name="routingPreference", visibility=["read", "create", "update", "delete", "query"] + ) + """Maintains information about the network routing choice opted by the user for data transfer.""" + dual_stack_endpoint_preference: Optional["_models.DualStackEndpointPreference"] = rest_field( + name="dualStackEndpointPreference", visibility=["read", "create", "update", "delete", "query"] + ) + """Maintains information about the Internet protocol opted by the user.""" + blob_restore_status: Optional["_models.BlobRestoreStatus"] = rest_field( + name="blobRestoreStatus", visibility=["read"] + ) + """Blob restore status.""" + allow_blob_public_access: Optional[bool] = rest_field( + name="allowBlobPublicAccess", visibility=["read", "create", "update", "delete", "query"] + ) + """Allow or disallow public access 
to all blobs or containers in the storage account. The default + interpretation is false for this property.""" + minimum_tls_version: Optional[Union[str, "_models.MinimumTlsVersion"]] = rest_field( + name="minimumTlsVersion", visibility=["read", "create", "update", "delete", "query"] + ) + """Set the minimum TLS version to be permitted on requests to storage. The default interpretation + is TLS 1.0 for this property. Minimum TLS version 1.3 version is not supported. Known values + are: \"TLS1_0\", \"TLS1_1\", \"TLS1_2\", and \"TLS1_3\".""" + allow_shared_key_access: Optional[bool] = rest_field( + name="allowSharedKeyAccess", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether the storage account permits requests to be authorized with the account access + key via Shared Key. If false, then all requests, including shared access signatures, must be + authorized with Azure Active Directory (Azure AD). The default value is null, which is + equivalent to true.""" + enable_nfs_v3: Optional[bool] = rest_field( + name="isNfsV3Enabled", visibility=["read", "create", "update", "delete", "query"] + ) + """NFS 3.0 protocol support enabled if set to true.""" + allow_cross_tenant_replication: Optional[bool] = rest_field( + name="allowCrossTenantReplication", visibility=["read", "create", "update", "delete", "query"] + ) + """Allow or disallow cross AAD tenant object replication. Set this property to true for new or + existing accounts only if object replication policies will involve storage accounts in + different AAD tenants. The default interpretation is false for new accounts to follow best + security practices by default.""" + default_to_o_auth_authentication: Optional[bool] = rest_field( + name="defaultToOAuthAuthentication", visibility=["read", "create", "update", "delete", "query"] + ) + """A boolean flag which indicates whether the default authentication is OAuth or not. 
The default + interpretation is false for this property.""" + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field( + name="publicNetworkAccess", visibility=["read", "create", "update", "delete", "query"] + ) + """Allow, disallow, or let Network Security Perimeter configuration to evaluate public network + access to Storage Account. Known values are: \"Enabled\", \"Disabled\", and + \"SecuredByPerimeter\".""" + immutable_storage_with_versioning: Optional["_models.ImmutableStorageAccount"] = rest_field( + name="immutableStorageWithVersioning", visibility=["read", "create", "update", "delete", "query"] + ) + """The property is immutable and can only be set to true at the account creation time. When set to + true, it enables object level immutability for all the containers in the account by default.""" + allowed_copy_scope: Optional[Union[str, "_models.AllowedCopyScope"]] = rest_field( + name="allowedCopyScope", visibility=["read", "create", "update", "delete", "query"] + ) + """Restrict copy to and from Storage Accounts within an AAD tenant or with Private Links to the + same VNet. Known values are: \"PrivateLink\", \"AAD\", and \"All\".""" + storage_account_sku_conversion_status: Optional["_models.StorageAccountSkuConversionStatus"] = rest_field( + name="storageAccountSkuConversionStatus", visibility=["read", "create", "update", "delete", "query"] + ) + """This property is readOnly and is set by server during asynchronous storage account sku + conversion operations.""" + dns_endpoint_type: Optional[Union[str, "_models.DnsEndpointType"]] = rest_field( + name="dnsEndpointType", visibility=["read", "create", "update", "delete", "query"] + ) + """Allows you to specify the type of endpoint. Set this to AzureDNSZone to create a large number + of accounts in a single subscription, which creates accounts in an Azure DNS Zone and the + endpoint URL will have an alphanumeric DNS Zone identifier. 
Known values are: \"Standard\" and + \"AzureDnsZone\".""" + is_sku_conversion_blocked: Optional[bool] = rest_field(name="isSkuConversionBlocked", visibility=["read"]) + """This property will be set to true or false on an event of ongoing migration. Default value is + null.""" + account_migration_in_progress: Optional[bool] = rest_field(name="accountMigrationInProgress", visibility=["read"]) + """If customer initiated account migration is in progress, the value will be true else it will be + null.""" + geo_priority_replication_status: Optional["_models.GeoPriorityReplicationStatus"] = rest_field( + name="geoPriorityReplicationStatus", visibility=["read", "create", "update", "delete", "query"] + ) + """Status indicating whether Geo Priority Replication is enabled for the account.""" + allow_shared_key_access_for_services: Optional["_models.StorageAccountSharedKeyAccessProperties"] = rest_field( + name="allowSharedKeyAccessForServices", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicate shared key access properties at service level.""" + data_collaboration_policy_properties: Optional["_models.StorageDataCollaborationPolicyProperties"] = rest_field( + name="dataCollaborationPolicyProperties", visibility=["read", "create", "update", "delete", "query"] + ) + """Data Collaboration policy for the storage account.""" + + @overload + def __init__( # pylint: disable=too-many-locals + self, + *, + azure_files_identity_based_authentication: Optional["_models.AzureFilesIdentityBasedAuthentication"] = None, + enable_https_traffic_only: Optional[bool] = None, + is_sftp_enabled: Optional[bool] = None, + is_local_user_enabled: Optional[bool] = None, + enable_extended_groups: Optional[bool] = None, + is_hns_enabled: Optional[bool] = None, + large_file_shares_state: Optional[Union[str, "_models.LargeFileSharesState"]] = None, + routing_preference: Optional["_models.RoutingPreference"] = None, + dual_stack_endpoint_preference: 
Optional["_models.DualStackEndpointPreference"] = None, + allow_blob_public_access: Optional[bool] = None, + minimum_tls_version: Optional[Union[str, "_models.MinimumTlsVersion"]] = None, + allow_shared_key_access: Optional[bool] = None, + enable_nfs_v3: Optional[bool] = None, + allow_cross_tenant_replication: Optional[bool] = None, + default_to_o_auth_authentication: Optional[bool] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, + immutable_storage_with_versioning: Optional["_models.ImmutableStorageAccount"] = None, + allowed_copy_scope: Optional[Union[str, "_models.AllowedCopyScope"]] = None, + storage_account_sku_conversion_status: Optional["_models.StorageAccountSkuConversionStatus"] = None, + dns_endpoint_type: Optional[Union[str, "_models.DnsEndpointType"]] = None, + geo_priority_replication_status: Optional["_models.GeoPriorityReplicationStatus"] = None, + allow_shared_key_access_for_services: Optional["_models.StorageAccountSharedKeyAccessProperties"] = None, + data_collaboration_policy_properties: Optional["_models.StorageDataCollaborationPolicyProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageAccountPropertiesCreateParameters(_Model): + """The parameters used to create the storage account. + + :ivar allowed_copy_scope: Restrict copy to and from Storage Accounts within an AAD tenant or + with Private Links to the same VNet. Known values are: "PrivateLink", "AAD", and "All". + :vartype allowed_copy_scope: str or ~azure.mgmt.storage.models.AllowedCopyScope + :ivar public_network_access: Allow, disallow, or let Network Security Perimeter configuration + to evaluate public network access to Storage Account. 
Value is optional but if passed in, must + be 'Enabled', 'Disabled' or 'SecuredByPerimeter'. Known values are: "Enabled", "Disabled", and + "SecuredByPerimeter". + :vartype public_network_access: str or ~azure.mgmt.storage.models.PublicNetworkAccess + :ivar sas_policy: SasPolicy assigned to the storage account. + :vartype sas_policy: ~azure.mgmt.storage.models.SasPolicy + :ivar key_policy: KeyPolicy assigned to the storage account. + :vartype key_policy: ~azure.mgmt.storage.models.KeyPolicy + :ivar custom_domain: User domain assigned to the storage account. Name is the CNAME source. + Only one custom domain is supported per storage account at this time. To clear the existing + custom domain, use an empty string for the custom domain name property. + :vartype custom_domain: ~azure.mgmt.storage.models.CustomDomain + :ivar encryption: Encryption settings to be used for server-side encryption for the storage + account. + :vartype encryption: ~azure.mgmt.storage.models.Encryption + :ivar network_rule_set: Network rule set. + :vartype network_rule_set: ~azure.mgmt.storage.models.NetworkRuleSet + :ivar access_tier: Required for storage accounts where kind = BlobStorage. The access tier is + used for billing. The 'Premium' access tier is the default value for premium block blobs + storage account type and it cannot be changed for the premium block blobs storage account type. + Known values are: "Hot", "Cool", "Premium", "Cold", and "Smart". + :vartype access_tier: str or ~azure.mgmt.storage.models.AccessTier + :ivar azure_files_identity_based_authentication: Provides the identity based authentication + settings for Azure Files. + :vartype azure_files_identity_based_authentication: + ~azure.mgmt.storage.models.AzureFilesIdentityBasedAuthentication + :ivar enable_https_traffic_only: Allows https traffic only to storage service if sets to true. + The default value is true since API version 2019-04-01. 
+ :vartype enable_https_traffic_only: bool + :ivar is_sftp_enabled: Enables Secure File Transfer Protocol, if set to true. + :vartype is_sftp_enabled: bool + :ivar is_local_user_enabled: Enables local users feature, if set to true. + :vartype is_local_user_enabled: bool + :ivar enable_extended_groups: Enables extended group support with local users feature, if set + to true. + :vartype enable_extended_groups: bool + :ivar is_hns_enabled: Account HierarchicalNamespace enabled if sets to true. + :vartype is_hns_enabled: bool + :ivar large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be + disabled once it is enabled. Known values are: "Disabled" and "Enabled". + :vartype large_file_shares_state: str or ~azure.mgmt.storage.models.LargeFileSharesState + :ivar routing_preference: Maintains information about the network routing choice opted by the + user for data transfer. + :vartype routing_preference: ~azure.mgmt.storage.models.RoutingPreference + :ivar dual_stack_endpoint_preference: Maintains information about the Internet protocol opted + by the user. + :vartype dual_stack_endpoint_preference: ~azure.mgmt.storage.models.DualStackEndpointPreference + :ivar allow_blob_public_access: Allow or disallow public access to all blobs or containers in + the storage account. The default interpretation is false for this property. + :vartype allow_blob_public_access: bool + :ivar minimum_tls_version: Set the minimum TLS version to be permitted on requests to storage. + The default interpretation is TLS 1.0 for this property. Minimum TLS version 1.3 version is not + supported. Known values are: "TLS1_0", "TLS1_1", "TLS1_2", and "TLS1_3". + :vartype minimum_tls_version: str or ~azure.mgmt.storage.models.MinimumTlsVersion + :ivar allow_shared_key_access: Indicates whether the storage account permits requests to be + authorized with the account access key via Shared Key. 
If false, then all requests, including + shared access signatures, must be authorized with Azure Active Directory (Azure AD). The + default value is null, which is equivalent to true. + :vartype allow_shared_key_access: bool + :ivar enable_nfs_v3: NFS 3.0 protocol support enabled if set to true. + :vartype enable_nfs_v3: bool + :ivar allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. + Set this property to true for new or existing accounts only if object replication policies will + involve storage accounts in different AAD tenants. The default interpretation is false for new + accounts to follow best security practices by default. + :vartype allow_cross_tenant_replication: bool + :ivar default_to_o_auth_authentication: A boolean flag which indicates whether the default + authentication is OAuth or not. The default interpretation is false for this property. + :vartype default_to_o_auth_authentication: bool + :ivar immutable_storage_with_versioning: The property is immutable and can only be set to true + at the account creation time. When set to true, it enables object level immutability for all + the new containers in the account by default. + :vartype immutable_storage_with_versioning: ~azure.mgmt.storage.models.ImmutableStorageAccount + :ivar dns_endpoint_type: Allows you to specify the type of endpoint. Set this to AzureDNSZone + to create a large number of accounts in a single subscription, which creates accounts in an + Azure DNS Zone and the endpoint URL will have an alphanumeric DNS Zone identifier. Known values + are: "Standard" and "AzureDnsZone". + :vartype dns_endpoint_type: str or ~azure.mgmt.storage.models.DnsEndpointType + :ivar geo_priority_replication_status: Status indicating whether Geo Priority Replication is + enabled for the account. 
+ :vartype geo_priority_replication_status: + ~azure.mgmt.storage.models.GeoPriorityReplicationStatus + :ivar allow_shared_key_access_for_services: Indicate shared key access properties at service + level. + :vartype allow_shared_key_access_for_services: + ~azure.mgmt.storage.models.StorageAccountSharedKeyAccessProperties + :ivar data_collaboration_policy_properties: Data Collaboration policy for the storage account. + :vartype data_collaboration_policy_properties: + ~azure.mgmt.storage.models.StorageDataCollaborationPolicyProperties + """ + + allowed_copy_scope: Optional[Union[str, "_models.AllowedCopyScope"]] = rest_field( + name="allowedCopyScope", visibility=["read", "create", "update", "delete", "query"] + ) + """Restrict copy to and from Storage Accounts within an AAD tenant or with Private Links to the + same VNet. Known values are: \"PrivateLink\", \"AAD\", and \"All\".""" + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field( + name="publicNetworkAccess", visibility=["read", "create", "update", "delete", "query"] + ) + """Allow, disallow, or let Network Security Perimeter configuration to evaluate public network + access to Storage Account. Value is optional but if passed in, must be 'Enabled', 'Disabled' or + 'SecuredByPerimeter'. Known values are: \"Enabled\", \"Disabled\", and \"SecuredByPerimeter\".""" + sas_policy: Optional["_models.SasPolicy"] = rest_field( + name="sasPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """SasPolicy assigned to the storage account.""" + key_policy: Optional["_models.KeyPolicy"] = rest_field( + name="keyPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """KeyPolicy assigned to the storage account.""" + custom_domain: Optional["_models.CustomDomain"] = rest_field( + name="customDomain", visibility=["read", "create", "update", "delete", "query"] + ) + """User domain assigned to the storage account. Name is the CNAME source. 
Only one custom domain + is supported per storage account at this time. To clear the existing custom domain, use an + empty string for the custom domain name property.""" + encryption: Optional["_models.Encryption"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Encryption settings to be used for server-side encryption for the storage account.""" + network_rule_set: Optional["_models.NetworkRuleSet"] = rest_field( + name="networkAcls", visibility=["read", "create", "update", "delete", "query"] + ) + """Network rule set.""" + access_tier: Optional[Union[str, "_models.AccessTier"]] = rest_field( + name="accessTier", visibility=["read", "create", "update", "delete", "query"] + ) + """Required for storage accounts where kind = BlobStorage. The access tier is used for billing. + The 'Premium' access tier is the default value for premium block blobs storage account type and + it cannot be changed for the premium block blobs storage account type. Known values are: + \"Hot\", \"Cool\", \"Premium\", \"Cold\", and \"Smart\".""" + azure_files_identity_based_authentication: Optional["_models.AzureFilesIdentityBasedAuthentication"] = rest_field( + name="azureFilesIdentityBasedAuthentication", visibility=["read", "create", "update", "delete", "query"] + ) + """Provides the identity based authentication settings for Azure Files.""" + enable_https_traffic_only: Optional[bool] = rest_field( + name="supportsHttpsTrafficOnly", visibility=["read", "create", "update", "delete", "query"] + ) + """Allows https traffic only to storage service if sets to true. 
The default value is true since + API version 2019-04-01.""" + is_sftp_enabled: Optional[bool] = rest_field( + name="isSftpEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Enables Secure File Transfer Protocol, if set to true.""" + is_local_user_enabled: Optional[bool] = rest_field( + name="isLocalUserEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Enables local users feature, if set to true.""" + enable_extended_groups: Optional[bool] = rest_field( + name="enableExtendedGroups", visibility=["read", "create", "update", "delete", "query"] + ) + """Enables extended group support with local users feature, if set to true.""" + is_hns_enabled: Optional[bool] = rest_field( + name="isHnsEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Account HierarchicalNamespace enabled if sets to true.""" + large_file_shares_state: Optional[Union[str, "_models.LargeFileSharesState"]] = rest_field( + name="largeFileSharesState", visibility=["read", "create", "update", "delete", "query"] + ) + """Allow large file shares if sets to Enabled. It cannot be disabled once it is enabled. Known + values are: \"Disabled\" and \"Enabled\".""" + routing_preference: Optional["_models.RoutingPreference"] = rest_field( + name="routingPreference", visibility=["read", "create", "update", "delete", "query"] + ) + """Maintains information about the network routing choice opted by the user for data transfer.""" + dual_stack_endpoint_preference: Optional["_models.DualStackEndpointPreference"] = rest_field( + name="dualStackEndpointPreference", visibility=["read", "create", "update", "delete", "query"] + ) + """Maintains information about the Internet protocol opted by the user.""" + allow_blob_public_access: Optional[bool] = rest_field( + name="allowBlobPublicAccess", visibility=["read", "create", "update", "delete", "query"] + ) + """Allow or disallow public access to all blobs or containers in the storage account. 
The default + interpretation is false for this property.""" + minimum_tls_version: Optional[Union[str, "_models.MinimumTlsVersion"]] = rest_field( + name="minimumTlsVersion", visibility=["read", "create", "update", "delete", "query"] + ) + """Set the minimum TLS version to be permitted on requests to storage. The default interpretation + is TLS 1.0 for this property. Minimum TLS version 1.3 version is not supported. Known values + are: \"TLS1_0\", \"TLS1_1\", \"TLS1_2\", and \"TLS1_3\".""" + allow_shared_key_access: Optional[bool] = rest_field( + name="allowSharedKeyAccess", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether the storage account permits requests to be authorized with the account access + key via Shared Key. If false, then all requests, including shared access signatures, must be + authorized with Azure Active Directory (Azure AD). The default value is null, which is + equivalent to true.""" + enable_nfs_v3: Optional[bool] = rest_field( + name="isNfsV3Enabled", visibility=["read", "create", "update", "delete", "query"] + ) + """NFS 3.0 protocol support enabled if set to true.""" + allow_cross_tenant_replication: Optional[bool] = rest_field( + name="allowCrossTenantReplication", visibility=["read", "create", "update", "delete", "query"] + ) + """Allow or disallow cross AAD tenant object replication. Set this property to true for new or + existing accounts only if object replication policies will involve storage accounts in + different AAD tenants. The default interpretation is false for new accounts to follow best + security practices by default.""" + default_to_o_auth_authentication: Optional[bool] = rest_field( + name="defaultToOAuthAuthentication", visibility=["read", "create", "update", "delete", "query"] + ) + """A boolean flag which indicates whether the default authentication is OAuth or not. 
The default + interpretation is false for this property.""" + immutable_storage_with_versioning: Optional["_models.ImmutableStorageAccount"] = rest_field( + name="immutableStorageWithVersioning", visibility=["read", "create", "update", "delete", "query"] + ) + """The property is immutable and can only be set to true at the account creation time. When set to + true, it enables object level immutability for all the new containers in the account by + default.""" + dns_endpoint_type: Optional[Union[str, "_models.DnsEndpointType"]] = rest_field( + name="dnsEndpointType", visibility=["read", "create", "update", "delete", "query"] + ) + """Allows you to specify the type of endpoint. Set this to AzureDNSZone to create a large number + of accounts in a single subscription, which creates accounts in an Azure DNS Zone and the + endpoint URL will have an alphanumeric DNS Zone identifier. Known values are: \"Standard\" and + \"AzureDnsZone\".""" + geo_priority_replication_status: Optional["_models.GeoPriorityReplicationStatus"] = rest_field( + name="geoPriorityReplicationStatus", visibility=["read", "create", "update", "delete", "query"] + ) + """Status indicating whether Geo Priority Replication is enabled for the account.""" + allow_shared_key_access_for_services: Optional["_models.StorageAccountSharedKeyAccessProperties"] = rest_field( + name="allowSharedKeyAccessForServices", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicate shared key access properties at service level.""" + data_collaboration_policy_properties: Optional["_models.StorageDataCollaborationPolicyProperties"] = rest_field( + name="dataCollaborationPolicyProperties", visibility=["read", "create", "update", "delete", "query"] + ) + """Data Collaboration policy for the storage account.""" + + @overload + def __init__( # pylint: disable=too-many-locals + self, + *, + allowed_copy_scope: Optional[Union[str, "_models.AllowedCopyScope"]] = None, + public_network_access: Optional[Union[str, 
"_models.PublicNetworkAccess"]] = None, + sas_policy: Optional["_models.SasPolicy"] = None, + key_policy: Optional["_models.KeyPolicy"] = None, + custom_domain: Optional["_models.CustomDomain"] = None, + encryption: Optional["_models.Encryption"] = None, + network_rule_set: Optional["_models.NetworkRuleSet"] = None, + access_tier: Optional[Union[str, "_models.AccessTier"]] = None, + azure_files_identity_based_authentication: Optional["_models.AzureFilesIdentityBasedAuthentication"] = None, + enable_https_traffic_only: Optional[bool] = None, + is_sftp_enabled: Optional[bool] = None, + is_local_user_enabled: Optional[bool] = None, + enable_extended_groups: Optional[bool] = None, + is_hns_enabled: Optional[bool] = None, + large_file_shares_state: Optional[Union[str, "_models.LargeFileSharesState"]] = None, + routing_preference: Optional["_models.RoutingPreference"] = None, + dual_stack_endpoint_preference: Optional["_models.DualStackEndpointPreference"] = None, + allow_blob_public_access: Optional[bool] = None, + minimum_tls_version: Optional[Union[str, "_models.MinimumTlsVersion"]] = None, + allow_shared_key_access: Optional[bool] = None, + enable_nfs_v3: Optional[bool] = None, + allow_cross_tenant_replication: Optional[bool] = None, + default_to_o_auth_authentication: Optional[bool] = None, + immutable_storage_with_versioning: Optional["_models.ImmutableStorageAccount"] = None, + dns_endpoint_type: Optional[Union[str, "_models.DnsEndpointType"]] = None, + geo_priority_replication_status: Optional["_models.GeoPriorityReplicationStatus"] = None, + allow_shared_key_access_for_services: Optional["_models.StorageAccountSharedKeyAccessProperties"] = None, + data_collaboration_policy_properties: Optional["_models.StorageDataCollaborationPolicyProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageAccountPropertiesUpdateParameters(_Model): + """The parameters used when updating a storage account. + + :ivar custom_domain: Custom domain assigned to the storage account by the user. Name is the + CNAME source. Only one custom domain is supported per storage account at this time. To clear + the existing custom domain, use an empty string for the custom domain name property. + :vartype custom_domain: ~azure.mgmt.storage.models.CustomDomain + :ivar encryption: Not applicable. Azure Storage encryption at rest is enabled by default for + all storage accounts and cannot be disabled. + :vartype encryption: ~azure.mgmt.storage.models.Encryption + :ivar sas_policy: SasPolicy assigned to the storage account. + :vartype sas_policy: ~azure.mgmt.storage.models.SasPolicy + :ivar key_policy: KeyPolicy assigned to the storage account. + :vartype key_policy: ~azure.mgmt.storage.models.KeyPolicy + :ivar access_tier: Required for storage accounts where kind = BlobStorage. The access tier is + used for billing. The 'Premium' access tier is the default value for premium block blobs + storage account type and it cannot be changed for the premium block blobs storage account type. + Known values are: "Hot", "Cool", "Premium", "Cold", and "Smart". + :vartype access_tier: str or ~azure.mgmt.storage.models.AccessTier + :ivar azure_files_identity_based_authentication: Provides the identity based authentication + settings for Azure Files. + :vartype azure_files_identity_based_authentication: + ~azure.mgmt.storage.models.AzureFilesIdentityBasedAuthentication + :ivar enable_https_traffic_only: Allows https traffic only to storage service if sets to true. + :vartype enable_https_traffic_only: bool + :ivar is_sftp_enabled: Enables Secure File Transfer Protocol, if set to true. 
+ :vartype is_sftp_enabled: bool + :ivar is_local_user_enabled: Enables local users feature, if set to true. + :vartype is_local_user_enabled: bool + :ivar enable_extended_groups: Enables extended group support with local users feature, if set + to true. + :vartype enable_extended_groups: bool + :ivar network_rule_set: Network rule set. + :vartype network_rule_set: ~azure.mgmt.storage.models.NetworkRuleSet + :ivar large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be + disabled once it is enabled. Known values are: "Disabled" and "Enabled". + :vartype large_file_shares_state: str or ~azure.mgmt.storage.models.LargeFileSharesState + :ivar routing_preference: Maintains information about the network routing choice opted by the + user for data transfer. + :vartype routing_preference: ~azure.mgmt.storage.models.RoutingPreference + :ivar dual_stack_endpoint_preference: Maintains information about the Internet protocol opted + by the user. + :vartype dual_stack_endpoint_preference: ~azure.mgmt.storage.models.DualStackEndpointPreference + :ivar allow_blob_public_access: Allow or disallow public access to all blobs or containers in + the storage account. The default interpretation is false for this property. + :vartype allow_blob_public_access: bool + :ivar minimum_tls_version: Set the minimum TLS version to be permitted on requests to storage. + The default interpretation is TLS 1.0 for this property. Minimum TLS version 1.3 version is not + supported. Known values are: "TLS1_0", "TLS1_1", "TLS1_2", and "TLS1_3". + :vartype minimum_tls_version: str or ~azure.mgmt.storage.models.MinimumTlsVersion + :ivar allow_shared_key_access: Indicates whether the storage account permits requests to be + authorized with the account access key via Shared Key. If false, then all requests, including + shared access signatures, must be authorized with Azure Active Directory (Azure AD). The + default value is null, which is equivalent to true. 
+ :vartype allow_shared_key_access: bool + :ivar allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. + Set this property to true for new or existing accounts only if object replication policies will + involve storage accounts in different AAD tenants. The default interpretation is false for new + accounts to follow best security practices by default. + :vartype allow_cross_tenant_replication: bool + :ivar default_to_o_auth_authentication: A boolean flag which indicates whether the default + authentication is OAuth or not. The default interpretation is false for this property. + :vartype default_to_o_auth_authentication: bool + :ivar public_network_access: Allow, disallow, or let Network Security Perimeter configuration + to evaluate public network access to Storage Account. Value is optional but if passed in, must + be 'Enabled', 'Disabled' or 'SecuredByPerimeter'. Known values are: "Enabled", "Disabled", and + "SecuredByPerimeter". + :vartype public_network_access: str or ~azure.mgmt.storage.models.PublicNetworkAccess + :ivar immutable_storage_with_versioning: The property is immutable and can only be set to true + at the account creation time. When set to true, it enables object level immutability for all + the containers in the account by default. + :vartype immutable_storage_with_versioning: ~azure.mgmt.storage.models.ImmutableStorageAccount + :ivar allowed_copy_scope: Restrict copy to and from Storage Accounts within an AAD tenant or + with Private Links to the same VNet. Known values are: "PrivateLink", "AAD", and "All". + :vartype allowed_copy_scope: str or ~azure.mgmt.storage.models.AllowedCopyScope + :ivar dns_endpoint_type: Allows you to specify the type of endpoint. Set this to AzureDNSZone + to create a large number of accounts in a single subscription, which creates accounts in an + Azure DNS Zone and the endpoint URL will have an alphanumeric DNS Zone identifier. Known values + are: "Standard" and "AzureDnsZone". 
+ :vartype dns_endpoint_type: str or ~azure.mgmt.storage.models.DnsEndpointType + :ivar geo_priority_replication_status: Status indicating whether Geo Priority Replication is + enabled for the account. + :vartype geo_priority_replication_status: + ~azure.mgmt.storage.models.GeoPriorityReplicationStatus + :ivar allow_shared_key_access_for_services: Indicate shared key access properties at service + level. + :vartype allow_shared_key_access_for_services: + ~azure.mgmt.storage.models.StorageAccountSharedKeyAccessProperties + :ivar data_collaboration_policy_properties: Data Collaboration policy for the storage account. + :vartype data_collaboration_policy_properties: + ~azure.mgmt.storage.models.StorageDataCollaborationPolicyProperties + """ + + custom_domain: Optional["_models.CustomDomain"] = rest_field( + name="customDomain", visibility=["read", "create", "update", "delete", "query"] + ) + """Custom domain assigned to the storage account by the user. Name is the CNAME source. Only one + custom domain is supported per storage account at this time. To clear the existing custom + domain, use an empty string for the custom domain name property.""" + encryption: Optional["_models.Encryption"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Not applicable. 
Azure Storage encryption at rest is enabled by default for all storage accounts + and cannot be disabled.""" + sas_policy: Optional["_models.SasPolicy"] = rest_field( + name="sasPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """SasPolicy assigned to the storage account.""" + key_policy: Optional["_models.KeyPolicy"] = rest_field( + name="keyPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """KeyPolicy assigned to the storage account.""" + access_tier: Optional[Union[str, "_models.AccessTier"]] = rest_field( + name="accessTier", visibility=["read", "create", "update", "delete", "query"] + ) + """Required for storage accounts where kind = BlobStorage. The access tier is used for billing. + The 'Premium' access tier is the default value for premium block blobs storage account type and + it cannot be changed for the premium block blobs storage account type. Known values are: + \"Hot\", \"Cool\", \"Premium\", \"Cold\", and \"Smart\".""" + azure_files_identity_based_authentication: Optional["_models.AzureFilesIdentityBasedAuthentication"] = rest_field( + name="azureFilesIdentityBasedAuthentication", visibility=["read", "create", "update", "delete", "query"] + ) + """Provides the identity based authentication settings for Azure Files.""" + enable_https_traffic_only: Optional[bool] = rest_field( + name="supportsHttpsTrafficOnly", visibility=["read", "create", "update", "delete", "query"] + ) + """Allows https traffic only to storage service if sets to true.""" + is_sftp_enabled: Optional[bool] = rest_field( + name="isSftpEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Enables Secure File Transfer Protocol, if set to true.""" + is_local_user_enabled: Optional[bool] = rest_field( + name="isLocalUserEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Enables local users feature, if set to true.""" + enable_extended_groups: Optional[bool] = rest_field( + 
name="enableExtendedGroups", visibility=["read", "create", "update", "delete", "query"] + ) + """Enables extended group support with local users feature, if set to true.""" + network_rule_set: Optional["_models.NetworkRuleSet"] = rest_field( + name="networkAcls", visibility=["read", "create", "update", "delete", "query"] + ) + """Network rule set.""" + large_file_shares_state: Optional[Union[str, "_models.LargeFileSharesState"]] = rest_field( + name="largeFileSharesState", visibility=["read", "create", "update", "delete", "query"] + ) + """Allow large file shares if sets to Enabled. It cannot be disabled once it is enabled. Known + values are: \"Disabled\" and \"Enabled\".""" + routing_preference: Optional["_models.RoutingPreference"] = rest_field( + name="routingPreference", visibility=["read", "create", "update", "delete", "query"] + ) + """Maintains information about the network routing choice opted by the user for data transfer.""" + dual_stack_endpoint_preference: Optional["_models.DualStackEndpointPreference"] = rest_field( + name="dualStackEndpointPreference", visibility=["read", "create", "update", "delete", "query"] + ) + """Maintains information about the Internet protocol opted by the user.""" + allow_blob_public_access: Optional[bool] = rest_field( + name="allowBlobPublicAccess", visibility=["read", "create", "update", "delete", "query"] + ) + """Allow or disallow public access to all blobs or containers in the storage account. The default + interpretation is false for this property.""" + minimum_tls_version: Optional[Union[str, "_models.MinimumTlsVersion"]] = rest_field( + name="minimumTlsVersion", visibility=["read", "create", "update", "delete", "query"] + ) + """Set the minimum TLS version to be permitted on requests to storage. The default interpretation + is TLS 1.0 for this property. Minimum TLS version 1.3 version is not supported. 
Known values + are: \"TLS1_0\", \"TLS1_1\", \"TLS1_2\", and \"TLS1_3\".""" + allow_shared_key_access: Optional[bool] = rest_field( + name="allowSharedKeyAccess", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether the storage account permits requests to be authorized with the account access + key via Shared Key. If false, then all requests, including shared access signatures, must be + authorized with Azure Active Directory (Azure AD). The default value is null, which is + equivalent to true.""" + allow_cross_tenant_replication: Optional[bool] = rest_field( + name="allowCrossTenantReplication", visibility=["read", "create", "update", "delete", "query"] + ) + """Allow or disallow cross AAD tenant object replication. Set this property to true for new or + existing accounts only if object replication policies will involve storage accounts in + different AAD tenants. The default interpretation is false for new accounts to follow best + security practices by default.""" + default_to_o_auth_authentication: Optional[bool] = rest_field( + name="defaultToOAuthAuthentication", visibility=["read", "create", "update", "delete", "query"] + ) + """A boolean flag which indicates whether the default authentication is OAuth or not. The default + interpretation is false for this property.""" + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field( + name="publicNetworkAccess", visibility=["read", "create", "update", "delete", "query"] + ) + """Allow, disallow, or let Network Security Perimeter configuration to evaluate public network + access to Storage Account. Value is optional but if passed in, must be 'Enabled', 'Disabled' or + 'SecuredByPerimeter'. 
Known values are: \"Enabled\", \"Disabled\", and \"SecuredByPerimeter\".""" + immutable_storage_with_versioning: Optional["_models.ImmutableStorageAccount"] = rest_field( + name="immutableStorageWithVersioning", visibility=["read", "create", "update", "delete", "query"] + ) + """The property is immutable and can only be set to true at the account creation time. When set to + true, it enables object level immutability for all the containers in the account by default.""" + allowed_copy_scope: Optional[Union[str, "_models.AllowedCopyScope"]] = rest_field( + name="allowedCopyScope", visibility=["read", "create", "update", "delete", "query"] + ) + """Restrict copy to and from Storage Accounts within an AAD tenant or with Private Links to the + same VNet. Known values are: \"PrivateLink\", \"AAD\", and \"All\".""" + dns_endpoint_type: Optional[Union[str, "_models.DnsEndpointType"]] = rest_field( + name="dnsEndpointType", visibility=["read", "create", "update", "delete", "query"] + ) + """Allows you to specify the type of endpoint. Set this to AzureDNSZone to create a large number + of accounts in a single subscription, which creates accounts in an Azure DNS Zone and the + endpoint URL will have an alphanumeric DNS Zone identifier. 
Known values are: \"Standard\" and + \"AzureDnsZone\".""" + geo_priority_replication_status: Optional["_models.GeoPriorityReplicationStatus"] = rest_field( + name="geoPriorityReplicationStatus", visibility=["read", "create", "update", "delete", "query"] + ) + """Status indicating whether Geo Priority Replication is enabled for the account.""" + allow_shared_key_access_for_services: Optional["_models.StorageAccountSharedKeyAccessProperties"] = rest_field( + name="allowSharedKeyAccessForServices", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicate shared key access properties at service level.""" + data_collaboration_policy_properties: Optional["_models.StorageDataCollaborationPolicyProperties"] = rest_field( + name="dataCollaborationPolicyProperties", visibility=["read", "create", "update", "delete", "query"] + ) + """Data Collaboration policy for the storage account.""" + + @overload + def __init__( # pylint: disable=too-many-locals + self, + *, + custom_domain: Optional["_models.CustomDomain"] = None, + encryption: Optional["_models.Encryption"] = None, + sas_policy: Optional["_models.SasPolicy"] = None, + key_policy: Optional["_models.KeyPolicy"] = None, + access_tier: Optional[Union[str, "_models.AccessTier"]] = None, + azure_files_identity_based_authentication: Optional["_models.AzureFilesIdentityBasedAuthentication"] = None, + enable_https_traffic_only: Optional[bool] = None, + is_sftp_enabled: Optional[bool] = None, + is_local_user_enabled: Optional[bool] = None, + enable_extended_groups: Optional[bool] = None, + network_rule_set: Optional["_models.NetworkRuleSet"] = None, + large_file_shares_state: Optional[Union[str, "_models.LargeFileSharesState"]] = None, + routing_preference: Optional["_models.RoutingPreference"] = None, + dual_stack_endpoint_preference: Optional["_models.DualStackEndpointPreference"] = None, + allow_blob_public_access: Optional[bool] = None, + minimum_tls_version: Optional[Union[str, 
"_models.MinimumTlsVersion"]] = None, + allow_shared_key_access: Optional[bool] = None, + allow_cross_tenant_replication: Optional[bool] = None, + default_to_o_auth_authentication: Optional[bool] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, + immutable_storage_with_versioning: Optional["_models.ImmutableStorageAccount"] = None, + allowed_copy_scope: Optional[Union[str, "_models.AllowedCopyScope"]] = None, + dns_endpoint_type: Optional[Union[str, "_models.DnsEndpointType"]] = None, + geo_priority_replication_status: Optional["_models.GeoPriorityReplicationStatus"] = None, + allow_shared_key_access_for_services: Optional["_models.StorageAccountSharedKeyAccessProperties"] = None, + data_collaboration_policy_properties: Optional["_models.StorageDataCollaborationPolicyProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageAccountRegenerateKeyParameters(_Model): + """The parameters used to regenerate the storage account key. + + :ivar key_name: The name of storage keys that want to be regenerated, possible values are key1, + key2, kerb1, kerb2. Required. + :vartype key_name: str + """ + + key_name: str = rest_field(name="keyName", visibility=["read", "create", "update", "delete", "query"]) + """The name of storage keys that want to be regenerated, possible values are key1, key2, kerb1, + kerb2. Required.""" + + @overload + def __init__( + self, + *, + key_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageAccountSharedKeyAccessProperties(_Model): + """Defines shared key access properties for a storage account. + + :ivar blob: Shared key access settings for Blob service. + :vartype blob: ~azure.mgmt.storage.models.ServiceSharedKeyAccessProperties + :ivar file: Shared key access settings for File service. + :vartype file: ~azure.mgmt.storage.models.ServiceSharedKeyAccessProperties + :ivar table: Shared key access settings for Table service. + :vartype table: ~azure.mgmt.storage.models.ServiceSharedKeyAccessProperties + :ivar queue: Shared key access settings for Queue service. + :vartype queue: ~azure.mgmt.storage.models.ServiceSharedKeyAccessProperties + """ + + blob: Optional["_models.ServiceSharedKeyAccessProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Shared key access settings for Blob service.""" + file: Optional["_models.ServiceSharedKeyAccessProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Shared key access settings for File service.""" + table: Optional["_models.ServiceSharedKeyAccessProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Shared key access settings for Table service.""" + queue: Optional["_models.ServiceSharedKeyAccessProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Shared key access settings for Queue service.""" + + @overload + def __init__( + self, + *, + blob: Optional["_models.ServiceSharedKeyAccessProperties"] = None, + file: Optional["_models.ServiceSharedKeyAccessProperties"] = None, + table: Optional["_models.ServiceSharedKeyAccessProperties"] = None, + queue: Optional["_models.ServiceSharedKeyAccessProperties"] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageAccountSkuConversionStatus(_Model): + """This defines the sku conversion status object for asynchronous sku conversions. + + :ivar sku_conversion_status: This property indicates the current sku conversion status. Known + values are: "InProgress", "Succeeded", and "Failed". + :vartype sku_conversion_status: str or ~azure.mgmt.storage.models.SkuConversionStatus + :ivar target_sku_name: This property represents the target sku name to which the account sku is + being converted asynchronously. Known values are: "Standard_LRS", "Standard_GRS", + "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", "Standard_GZRS", + "Standard_RAGZRS", "StandardV2_LRS", "StandardV2_GRS", "StandardV2_ZRS", "StandardV2_GZRS", + "PremiumV2_LRS", and "PremiumV2_ZRS". + :vartype target_sku_name: str or ~azure.mgmt.storage.models.SkuName + :ivar start_time: This property represents the sku conversion start time. + :vartype start_time: str + :ivar end_time: This property represents the sku conversion end time. + :vartype end_time: str + """ + + sku_conversion_status: Optional[Union[str, "_models.SkuConversionStatus"]] = rest_field( + name="skuConversionStatus", visibility=["read"] + ) + """This property indicates the current sku conversion status. Known values are: \"InProgress\", + \"Succeeded\", and \"Failed\".""" + target_sku_name: Optional[Union[str, "_models.SkuName"]] = rest_field( + name="targetSkuName", visibility=["read", "create", "update", "delete", "query"] + ) + """This property represents the target sku name to which the account sku is being converted + asynchronously. 
Known values are: \"Standard_LRS\", \"Standard_GRS\", \"Standard_RAGRS\", + \"Standard_ZRS\", \"Premium_LRS\", \"Premium_ZRS\", \"Standard_GZRS\", \"Standard_RAGZRS\", + \"StandardV2_LRS\", \"StandardV2_GRS\", \"StandardV2_ZRS\", \"StandardV2_GZRS\", + \"PremiumV2_LRS\", and \"PremiumV2_ZRS\".""" + start_time: Optional[str] = rest_field(name="startTime", visibility=["read"]) + """This property represents the sku conversion start time.""" + end_time: Optional[str] = rest_field(name="endTime", visibility=["read"]) + """This property represents the sku conversion end time.""" + + @overload + def __init__( + self, + *, + target_sku_name: Optional[Union[str, "_models.SkuName"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageAccountUpdateParameters(_Model): + """The parameters that can be provided when updating the storage account properties. + + :ivar sku: Gets or sets the SKU name. Note that the SKU name cannot be updated to Standard_ZRS, + Premium_LRS or Premium_ZRS, nor can accounts of those SKU names be updated to any other value. + :vartype sku: ~azure.mgmt.storage.models.Sku + :ivar tags: Gets or sets a list of key value pairs that describe the resource. These tags can + be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags + can be provided for a resource. Each tag must have a key no greater in length than 128 + characters and a value no greater in length than 256 characters. + :vartype tags: dict[str, str] + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.storage.models.Identity + :ivar properties: The parameters used when updating a storage account. 
+ :vartype properties: ~azure.mgmt.storage.models.StorageAccountPropertiesUpdateParameters + :ivar kind: Optional. Indicates the type of storage account. Currently only StorageV2 value + supported by server. Known values are: "Storage", "StorageV2", "BlobStorage", "FileStorage", + and "BlockBlobStorage". + :vartype kind: str or ~azure.mgmt.storage.models.Kind + :ivar zones: Optional. Gets or sets the pinned logical availability zone for the storage + account. + :vartype zones: list[str] + :ivar placement: Optional. Gets or sets the zonal placement details for the storage account. + :vartype placement: ~azure.mgmt.storage.models.Placement + """ + + sku: Optional["_models.Sku"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Gets or sets the SKU name. Note that the SKU name cannot be updated to Standard_ZRS, + Premium_LRS or Premium_ZRS, nor can accounts of those SKU names be updated to any other value.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Gets or sets a list of key value pairs that describe the resource. These tags can be used in + viewing and grouping this resource (across resource groups). A maximum of 15 tags can be + provided for a resource. Each tag must have a key no greater in length than 128 characters and + a value no greater in length than 256 characters.""" + identity: Optional["_models.Identity"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The identity of the resource.""" + properties: Optional["_models.StorageAccountPropertiesUpdateParameters"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The parameters used when updating a storage account.""" + kind: Optional[Union[str, "_models.Kind"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional. Indicates the type of storage account. Currently only StorageV2 value supported by + server. 
Known values are: \"Storage\", \"StorageV2\", \"BlobStorage\", \"FileStorage\", and + \"BlockBlobStorage\".""" + zones: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional. Gets or sets the pinned logical availability zone for the storage account.""" + placement: Optional["_models.Placement"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional. Gets or sets the zonal placement details for the storage account.""" + + __flattened_items = [ + "custom_domain", + "encryption", + "sas_policy", + "key_policy", + "access_tier", + "azure_files_identity_based_authentication", + "enable_https_traffic_only", + "is_sftp_enabled", + "is_local_user_enabled", + "enable_extended_groups", + "network_rule_set", + "large_file_shares_state", + "routing_preference", + "dual_stack_endpoint_preference", + "allow_blob_public_access", + "minimum_tls_version", + "allow_shared_key_access", + "allow_cross_tenant_replication", + "default_to_o_auth_authentication", + "public_network_access", + "immutable_storage_with_versioning", + "allowed_copy_scope", + "dns_endpoint_type", + "geo_priority_replication_status", + "allow_shared_key_access_for_services", + "data_collaboration_policy_properties", + ] + + @overload + def __init__( + self, + *, + sku: Optional["_models.Sku"] = None, + tags: Optional[dict[str, str]] = None, + identity: Optional["_models.Identity"] = None, + properties: Optional["_models.StorageAccountPropertiesUpdateParameters"] = None, + kind: Optional[Union[str, "_models.Kind"]] = None, + zones: Optional[list[str]] = None, + placement: Optional["_models.Placement"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.properties is None: + return None + return getattr(self.properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.properties is None: + self.properties = self._attr_to_rest_field["properties"]._class_type() + setattr(self.properties, key, value) + else: + super().__setattr__(key, value) + + +class StorageConnectorProperties(_Model): + """The storage connector properties. + + :ivar unique_id: System-generated GUID identifier for the Storage Connector. Not a valid input + parameter when creating. + :vartype unique_id: str + :ivar state: State - Active or Inactive. Whether or not the Storage Connector should start as + active (default: Active) (While set to false on the Storage Connector, all data plane requests + using this Storage Connector fail, and this Storage Connector is not billed if it would be + otherwise. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.storage.models.StorageConnectorState + :ivar creation_time: System-generated creation time of the Storage Connector in ISO 8601 + date-time format (YYYY-MM-DDTHH:mm:ssZ). Not a valid input parameter during creating. + :vartype creation_time: str + :ivar description: Arbitrary description of this Storage Connector. Max 250 characters. + :vartype description: str + :ivar test_connection: Test connection to backing data source before creating the storage + connector. 
+ :vartype test_connection: bool + :ivar data_source_type: The type of backing data source for this Storage Connector. Required. + "Azure_DataShare" + :vartype data_source_type: str or ~azure.mgmt.storage.models.StorageConnectorDataSourceType + :ivar source: Information about how to communicate with and authenticate to the backing data + store. Required. + :vartype source: ~azure.mgmt.storage.models.StorageConnectorSource + :ivar provisioning_state: Represents the provisioning state of the storage connector. Known + values are: "Accepted", "Creating", "Succeeded", "Deleting", "Canceled", and "Failed". + :vartype provisioning_state: str or + ~azure.mgmt.storage.models.NativeDataSharingProvisioningState + """ + + unique_id: Optional[str] = rest_field(name="uniqueId", visibility=["read"]) + """System-generated GUID identifier for the Storage Connector. Not a valid input parameter when + creating.""" + state: Optional[Union[str, "_models.StorageConnectorState"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """State - Active or Inactive. Whether or not the Storage Connector should start as active + (default: Active) (While set to false on the Storage Connector, all data plane requests using + this Storage Connector fail, and this Storage Connector is not billed if it would be otherwise. + Known values are: \"Active\" and \"Inactive\".""" + creation_time: Optional[str] = rest_field(name="creationTime", visibility=["read"]) + """System-generated creation time of the Storage Connector in ISO 8601 date-time format + (YYYY-MM-DDTHH:mm:ssZ). Not a valid input parameter during creating.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Arbitrary description of this Storage Connector. 
Max 250 characters.""" + test_connection: Optional[bool] = rest_field(name="testConnection", visibility=["create", "update"]) + """Test connection to backing data source before creating the storage connector.""" + data_source_type: Union[str, "_models.StorageConnectorDataSourceType"] = rest_field( + name="dataSourceType", visibility=["read", "create"] + ) + """The type of backing data source for this Storage Connector. Required. \"Azure_DataShare\"""" + source: "_models.StorageConnectorSource" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Information about how to communicate with and authenticate to the backing data store. Required.""" + provisioning_state: Optional[Union[str, "_models.NativeDataSharingProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """Represents the provisioning state of the storage connector. Known values are: \"Accepted\", + \"Creating\", \"Succeeded\", \"Deleting\", \"Canceled\", and \"Failed\".""" + + @overload + def __init__( + self, + *, + data_source_type: Union[str, "_models.StorageConnectorDataSourceType"], + source: "_models.StorageConnectorSource", + state: Optional[Union[str, "_models.StorageConnectorState"]] = None, + description: Optional[str] = None, + test_connection: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageDataCollaborationPolicyProperties(_Model): + """Defines Data Collaboration Policy for a storage account. + + :ivar allow_storage_connectors: Indicates whether storage connectors are allowed to created or + managed on the storage account. 
+ :vartype allow_storage_connectors: bool + :ivar allow_storage_data_shares: Indicates whether data shares are allowed to be created or + managed on the storage account. + :vartype allow_storage_data_shares: bool + :ivar allow_cross_tenant_data_sharing: Indicates whether cross-entra tenant data sharing is + allowed on the storage account. + :vartype allow_cross_tenant_data_sharing: bool + """ + + allow_storage_connectors: Optional[bool] = rest_field( + name="allowStorageConnectors", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether storage connectors are allowed to created or managed on the storage account.""" + allow_storage_data_shares: Optional[bool] = rest_field( + name="allowStorageDataShares", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether data shares are allowed to be created or managed on the storage account.""" + allow_cross_tenant_data_sharing: Optional[bool] = rest_field( + name="allowCrossTenantDataSharing", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether cross-entra tenant data sharing is allowed on the storage account.""" + + @overload + def __init__( + self, + *, + allow_storage_connectors: Optional[bool] = None, + allow_storage_data_shares: Optional[bool] = None, + allow_cross_tenant_data_sharing: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageDataShareAccessPolicy(_Model): + """Policy that specify the permission allowed to a managed identity. + + :ivar principal_id: The AAD principal ID of the Managed Identity. Required. + :vartype principal_id: str + :ivar tenant_id: The AAD tenant ID of the Managed Identity. Required. 
+ :vartype tenant_id: str + :ivar permission: Allowed permissions. Currently, only supported value is Read. Required. Known + values are: "None" and "Read". + :vartype permission: str or ~azure.mgmt.storage.models.StorageDataShareAccessPolicyPermission + """ + + principal_id: str = rest_field(name="principalId", visibility=["read", "create", "update", "delete", "query"]) + """The AAD principal ID of the Managed Identity. Required.""" + tenant_id: str = rest_field(name="tenantId", visibility=["read", "create", "update", "delete", "query"]) + """The AAD tenant ID of the Managed Identity. Required.""" + permission: Union[str, "_models.StorageDataShareAccessPolicyPermission"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Allowed permissions. Currently, only supported value is Read. Required. Known values are: + \"None\" and \"Read\".""" + + @overload + def __init__( + self, + *, + principal_id: str, + tenant_id: str, + permission: Union[str, "_models.StorageDataShareAccessPolicyPermission"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageDataShareAsset(_Model): + """Properties of a shared resource. + + :ivar asset_path: Source Path to be shared. It can be a folder or a blob. The asset path should + contain container name followed by path within the container, e.g. /container1/logs/external. + Required. + :vartype asset_path: str + :ivar display_name: Consumer visible name of the original path. Required. + :vartype display_name: str + """ + + asset_path: str = rest_field(name="assetPath", visibility=["read", "create", "update", "delete", "query"]) + """Source Path to be shared. It can be a folder or a blob. 
The asset path should contain container + name followed by path within the container, e.g. /container1/logs/external. Required.""" + display_name: str = rest_field(name="displayName", visibility=["read", "create", "update", "delete", "query"]) + """Consumer visible name of the original path. Required.""" + + @overload + def __init__( + self, + *, + asset_path: str, + display_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageDataShareProperties(_Model): + """The storage datashare properties. + + :ivar data_share_identifier: System-generated GUID identifier for the Storage DataShare. Not a + valid input parameter when creating. + :vartype data_share_identifier: str + :ivar description: Arbitrary description of this Data Share. Max 250 characters. + :vartype description: str + :ivar data_share_uri: The DataShare URI to be shared with the consumer. URI Format - + 'azds://::'. + :vartype data_share_uri: str + :ivar access_policies: List of access policies that specify the permission allowed to a managed + identity. For Create - This property is required and cannot be null. If no access policies are + provided at creation time, specify an empty array. For Update - This property is optional. If + set to null or not passed, the existing access policies are left unchanged. If provided with a + non-null value, the existing access policies are replaced with the specified list. Required. + :vartype access_policies: list[~azure.mgmt.storage.models.StorageDataShareAccessPolicy] + :ivar assets: List of assets that specify the properties of the shared resources. For Create - + This property is required and cannot be null. If no assets are provided at creation time, + specify an empty array. 
For Update - This property is optional. If set to null or not passed, + the existing assets are left unchanged. If provided with a non-null value, the existing assets + are replaced with the specified list. Required. + :vartype assets: list[~azure.mgmt.storage.models.StorageDataShareAsset] + :ivar provisioning_state: Represents the provisioning state of the storage datashare. Known + values are: "Accepted", "Creating", "Succeeded", "Deleting", "Canceled", and "Failed". + :vartype provisioning_state: str or + ~azure.mgmt.storage.models.NativeDataSharingProvisioningState + """ + + data_share_identifier: Optional[str] = rest_field(name="dataShareIdentifier", visibility=["read"]) + """System-generated GUID identifier for the Storage DataShare. Not a valid input parameter when + creating.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Arbitrary description of this Data Share. Max 250 characters.""" + data_share_uri: Optional[str] = rest_field(name="dataShareUri", visibility=["read"]) + """The DataShare URI to be shared with the consumer. URI Format - + 'azds://::'.""" + access_policies: list["_models.StorageDataShareAccessPolicy"] = rest_field( + name="accessPolicies", visibility=["read", "create", "update", "delete", "query"] + ) + """List of access policies that specify the permission allowed to a managed identity. For Create - + This property is required and cannot be null. If no access policies are provided at creation + time, specify an empty array. For Update - This property is optional. If set to null or not + passed, the existing access policies are left unchanged. If provided with a non-null value, the + existing access policies are replaced with the specified list. Required.""" + assets: list["_models.StorageDataShareAsset"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of assets that specify the properties of the shared resources. 
For Create - This property + is required and cannot be null. If no assets are provided at creation time, specify an empty + array. For Update - This property is optional. If set to null or not passed, the existing + assets are left unchanged. If provided with a non-null value, the existing assets are replaced + with the specified list. Required.""" + provisioning_state: Optional[Union[str, "_models.NativeDataSharingProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """Represents the provisioning state of the storage datashare. Known values are: \"Accepted\", + \"Creating\", \"Succeeded\", \"Deleting\", \"Canceled\", and \"Failed\".""" + + @overload + def __init__( + self, + *, + access_policies: list["_models.StorageDataShareAccessPolicy"], + assets: list["_models.StorageDataShareAsset"], + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageQueue(ProxyResource): + """Concrete proxy resource types can be created by aliasing this type using a specific property + type. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar queue_properties: Queue resource properties. 
+ :vartype queue_properties: ~azure.mgmt.storage.models.QueueProperties + """ + + queue_properties: Optional["_models.QueueProperties"] = rest_field( + name="properties", visibility=["read", "create", "update", "delete", "query"] + ) + """Queue resource properties.""" + + __flattened_items = ["metadata", "approximate_message_count"] + + @overload + def __init__( + self, + *, + queue_properties: Optional["_models.QueueProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.queue_properties is None: + return None + return getattr(self.queue_properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.queue_properties is None: + self.queue_properties = self._attr_to_rest_field["queue_properties"]._class_type() + setattr(self.queue_properties, key, value) + else: + super().__setattr__(key, value) + + +class StorageTaskAssignment(ProxyResource): + """The storage task assignment. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". 
+ :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar properties: Properties of the storage task assignment. + :vartype properties: ~azure.mgmt.storage.models.StorageTaskAssignmentProperties + """ + + properties: Optional["_models.StorageTaskAssignmentProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Properties of the storage task assignment.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.StorageTaskAssignmentProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageTaskAssignmentExecutionContext(_Model): + """Execution context of the storage task assignment. + + :ivar target: Execution target of the storage task assignment. + :vartype target: ~azure.mgmt.storage.models.ExecutionTarget + :ivar trigger: Execution trigger of the storage task assignment. Required. + :vartype trigger: ~azure.mgmt.storage.models.ExecutionTrigger + """ + + target: Optional["_models.ExecutionTarget"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Execution target of the storage task assignment.""" + trigger: "_models.ExecutionTrigger" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Execution trigger of the storage task assignment. Required.""" + + @overload + def __init__( + self, + *, + trigger: "_models.ExecutionTrigger", + target: Optional["_models.ExecutionTarget"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageTaskAssignmentProperties(_Model): + """Properties of the storage task assignment. + + :ivar task_id: Id of the corresponding storage task. Required. + :vartype task_id: str + :ivar enabled: Whether the storage task assignment is enabled or not. Required. + :vartype enabled: bool + :ivar description: Text that describes the purpose of the storage task assignment. Required. + :vartype description: str + :ivar execution_context: The storage task assignment execution context. Required. + :vartype execution_context: ~azure.mgmt.storage.models.StorageTaskAssignmentExecutionContext + :ivar report: The storage task assignment report. Required. + :vartype report: ~azure.mgmt.storage.models.StorageTaskAssignmentReport + :ivar provisioning_state: Represents the provisioning state of the storage task assignment. + Known values are: "ValidateSubscriptionQuotaBegin", "ValidateSubscriptionQuotaEnd", "Accepted", + "Creating", "Succeeded", "Deleting", "Canceled", and "Failed". + :vartype provisioning_state: str or + ~azure.mgmt.storage.models.StorageTaskAssignmentProvisioningState + :ivar run_status: Run status of storage task assignment. + :vartype run_status: ~azure.mgmt.storage.models.StorageTaskReportProperties + """ + + task_id: str = rest_field(name="taskId", visibility=["read", "create", "update", "delete", "query"]) + """Id of the corresponding storage task. Required.""" + enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the storage task assignment is enabled or not. Required.""" + description: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Text that describes the purpose of the storage task assignment. 
Required.""" + execution_context: "_models.StorageTaskAssignmentExecutionContext" = rest_field( + name="executionContext", visibility=["read", "create", "update", "delete", "query"] + ) + """The storage task assignment execution context. Required.""" + report: "_models.StorageTaskAssignmentReport" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The storage task assignment report. Required.""" + provisioning_state: Optional[Union[str, "_models.StorageTaskAssignmentProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """Represents the provisioning state of the storage task assignment. Known values are: + \"ValidateSubscriptionQuotaBegin\", \"ValidateSubscriptionQuotaEnd\", \"Accepted\", + \"Creating\", \"Succeeded\", \"Deleting\", \"Canceled\", and \"Failed\".""" + run_status: Optional["_models.StorageTaskReportProperties"] = rest_field( + name="runStatus", visibility=["read", "create", "update", "delete", "query"] + ) + """Run status of storage task assignment.""" + + @overload + def __init__( + self, + *, + task_id: str, + enabled: bool, + description: str, + execution_context: "_models.StorageTaskAssignmentExecutionContext", + report: "_models.StorageTaskAssignmentReport", + run_status: Optional["_models.StorageTaskReportProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageTaskAssignmentReport(_Model): + """The storage task assignment report. + + :ivar prefix: The container prefix for the location of storage task assignment report. + Required. 
+ :vartype prefix: str + """ + + prefix: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The container prefix for the location of storage task assignment report. Required.""" + + @overload + def __init__( + self, + *, + prefix: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageTaskAssignmentUpdateExecutionContext(_Model): # pylint: disable=name-too-long + """Execution context of the storage task assignment update. + + :ivar target: Execution target of the storage task assignment. + :vartype target: ~azure.mgmt.storage.models.ExecutionTarget + :ivar trigger: Execution trigger of the storage task assignment. + :vartype trigger: ~azure.mgmt.storage.models.ExecutionTriggerUpdate + """ + + target: Optional["_models.ExecutionTarget"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Execution target of the storage task assignment.""" + trigger: Optional["_models.ExecutionTriggerUpdate"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Execution trigger of the storage task assignment.""" + + @overload + def __init__( + self, + *, + target: Optional["_models.ExecutionTarget"] = None, + trigger: Optional["_models.ExecutionTriggerUpdate"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageTaskAssignmentUpdateParameters(_Model): + """Parameters of the storage task assignment update request. + + :ivar properties: Properties of the storage task assignment. 
+ :vartype properties: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateProperties + """ + + properties: Optional["_models.StorageTaskAssignmentUpdateProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Properties of the storage task assignment.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.StorageTaskAssignmentUpdateProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageTaskAssignmentUpdateProperties(_Model): + """Properties of the storage task update assignment. + + :ivar task_id: Id of the corresponding storage task. + :vartype task_id: str + :ivar enabled: Whether the storage task assignment is enabled or not. + :vartype enabled: bool + :ivar description: Text that describes the purpose of the storage task assignment. + :vartype description: str + :ivar execution_context: The storage task assignment execution context. + :vartype execution_context: + ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateExecutionContext + :ivar report: The storage task assignment report. + :vartype report: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateReport + :ivar provisioning_state: Represents the provisioning state of the storage task assignment. + Known values are: "ValidateSubscriptionQuotaBegin", "ValidateSubscriptionQuotaEnd", "Accepted", + "Creating", "Succeeded", "Deleting", "Canceled", and "Failed". + :vartype provisioning_state: str or + ~azure.mgmt.storage.models.StorageTaskAssignmentProvisioningState + :ivar run_status: Run status of storage task assignment. 
+ :vartype run_status: ~azure.mgmt.storage.models.StorageTaskReportProperties + """ + + task_id: Optional[str] = rest_field(name="taskId", visibility=["read"]) + """Id of the corresponding storage task.""" + enabled: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the storage task assignment is enabled or not.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Text that describes the purpose of the storage task assignment.""" + execution_context: Optional["_models.StorageTaskAssignmentUpdateExecutionContext"] = rest_field( + name="executionContext", visibility=["read", "create", "update", "delete", "query"] + ) + """The storage task assignment execution context.""" + report: Optional["_models.StorageTaskAssignmentUpdateReport"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The storage task assignment report.""" + provisioning_state: Optional[Union[str, "_models.StorageTaskAssignmentProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """Represents the provisioning state of the storage task assignment. Known values are: + \"ValidateSubscriptionQuotaBegin\", \"ValidateSubscriptionQuotaEnd\", \"Accepted\", + \"Creating\", \"Succeeded\", \"Deleting\", \"Canceled\", and \"Failed\".""" + run_status: Optional["_models.StorageTaskReportProperties"] = rest_field( + name="runStatus", visibility=["read", "create", "update", "delete", "query"] + ) + """Run status of storage task assignment.""" + + @overload + def __init__( + self, + *, + enabled: Optional[bool] = None, + description: Optional[str] = None, + execution_context: Optional["_models.StorageTaskAssignmentUpdateExecutionContext"] = None, + report: Optional["_models.StorageTaskAssignmentUpdateReport"] = None, + run_status: Optional["_models.StorageTaskReportProperties"] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageTaskAssignmentUpdateReport(_Model): + """The storage task assignment report. + + :ivar prefix: The prefix of the storage task assignment report. + :vartype prefix: str + """ + + prefix: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The prefix of the storage task assignment report.""" + + @overload + def __init__( + self, + *, + prefix: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageTaskReportInstance(ProxyResource): + """Storage Tasks run report instance. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar properties: Storage task execution report for a run instance. 
+ :vartype properties: ~azure.mgmt.storage.models.StorageTaskReportProperties + """ + + properties: Optional["_models.StorageTaskReportProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Storage task execution report for a run instance.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.StorageTaskReportProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageTaskReportProperties(_Model): + """Storage task execution report for a run instance. + + :ivar task_assignment_id: Represents the Storage Task Assignment Id associated with the storage + task that provided an execution context. + :vartype task_assignment_id: str + :ivar storage_account_id: Represents the Storage Account Id where the storage task definition + was applied and executed. + :vartype storage_account_id: str + :ivar start_time: Start time of the run instance. Filter options such as startTime gt + '2023-06-26T20:51:24.4494016Z' and other comparison operators can be used as described for + DateTime properties in + `https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators + `_. + :vartype start_time: str + :ivar finish_time: End time of the run instance. Filter options such as startTime gt + '2023-06-26T20:51:24.4494016Z' and other comparison operators can be used as described for + DateTime properties in + `https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators + `_. + :vartype finish_time: str + :ivar objects_targeted_count: Total number of objects that meet the condition as defined in the + storage task assignment execution context. 
Filter options such as objectsTargetedCount gt 50 + and other comparison operators can be used as described for Numerical properties in + `https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators + `_. + :vartype objects_targeted_count: str + :ivar objects_operated_on_count: Total number of objects that meet the storage tasks condition + and were operated upon. Filter options such as objectsOperatedOnCount ge 100 and other + comparison operators can be used as described for Numerical properties in + `https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators + `_. + :vartype objects_operated_on_count: str + :ivar object_failed_count: Total number of objects where task operation failed when was + attempted. Filter options such as objectFailedCount eq 0 and other comparison operators can be + used as described for Numerical properties in + `https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators + `_. + :vartype object_failed_count: str + :ivar objects_succeeded_count: Total number of objects where task operation succeeded when was + attempted.Filter options such as objectsSucceededCount gt 150 and other comparison operators + can be used as described for Numerical properties in + `https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators + `_. + :vartype objects_succeeded_count: str + :ivar run_status_error: Well known Azure Storage error code that represents the error + encountered during execution of the run instance. + :vartype run_status_error: str + :ivar run_status_enum: Represents the status of the execution. Known values are: "InProgress" + and "Finished". 
+ :vartype run_status_enum: str or ~azure.mgmt.storage.models.RunStatusEnum + :ivar summary_report_path: Full path to the verbose report stored in the reporting container as + specified in the assignment execution context for the storage account. + :vartype summary_report_path: str + :ivar task_id: Storage Task Arm Id. + :vartype task_id: str + :ivar task_version: Storage Task Version. + :vartype task_version: str + :ivar run_result: Represents the overall result of the execution for the run instance. Known + values are: "Succeeded" and "Failed". + :vartype run_result: str or ~azure.mgmt.storage.models.RunResult + """ + + task_assignment_id: Optional[str] = rest_field(name="taskAssignmentId", visibility=["read"]) + """Represents the Storage Task Assignment Id associated with the storage task that provided an + execution context.""" + storage_account_id: Optional[str] = rest_field(name="storageAccountId", visibility=["read"]) + """Represents the Storage Account Id where the storage task definition was applied and executed.""" + start_time: Optional[str] = rest_field(name="startTime", visibility=["read"]) + """Start time of the run instance. Filter options such as startTime gt + '2023-06-26T20:51:24.4494016Z' and other comparison operators can be used as described for + DateTime properties in + `https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators + `_.""" + finish_time: Optional[str] = rest_field(name="finishTime", visibility=["read"]) + """End time of the run instance. 
Filter options such as startTime gt + '2023-06-26T20:51:24.4494016Z' and other comparison operators can be used as described for + DateTime properties in + `https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators + `_.""" + objects_targeted_count: Optional[str] = rest_field(name="objectsTargetedCount", visibility=["read"]) + """Total number of objects that meet the condition as defined in the storage task assignment + execution context. Filter options such as objectsTargetedCount gt 50 and other comparison + operators can be used as described for Numerical properties in + `https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators + `_.""" + objects_operated_on_count: Optional[str] = rest_field(name="objectsOperatedOnCount", visibility=["read"]) + """Total number of objects that meet the storage tasks condition and were operated upon. Filter + options such as objectsOperatedOnCount ge 100 and other comparison operators can be used as + described for Numerical properties in + `https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators + `_.""" + object_failed_count: Optional[str] = rest_field(name="objectFailedCount", visibility=["read"]) + """Total number of objects where task operation failed when was attempted. 
Filter options such as + objectFailedCount eq 0 and other comparison operators can be used as described for Numerical + properties in + `https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators + `_.""" + objects_succeeded_count: Optional[str] = rest_field(name="objectsSucceededCount", visibility=["read"]) + """Total number of objects where task operation succeeded when was attempted.Filter options such + as objectsSucceededCount gt 150 and other comparison operators can be used as described for + Numerical properties in + `https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators + `_.""" + run_status_error: Optional[str] = rest_field(name="runStatusError", visibility=["read"]) + """Well known Azure Storage error code that represents the error encountered during execution of + the run instance.""" + run_status_enum: Optional[Union[str, "_models.RunStatusEnum"]] = rest_field( + name="runStatusEnum", visibility=["read"] + ) + """Represents the status of the execution. Known values are: \"InProgress\" and \"Finished\".""" + summary_report_path: Optional[str] = rest_field(name="summaryReportPath", visibility=["read"]) + """Full path to the verbose report stored in the reporting container as specified in the + assignment execution context for the storage account.""" + task_id: Optional[str] = rest_field(name="taskId", visibility=["read"]) + """Storage Task Arm Id.""" + task_version: Optional[str] = rest_field(name="taskVersion", visibility=["read"]) + """Storage Task Version.""" + run_result: Optional[Union[str, "_models.RunResult"]] = rest_field(name="runResult", visibility=["read"]) + """Represents the overall result of the execution for the run instance. Known values are: + \"Succeeded\" and \"Failed\".""" + + +class SystemData(_Model): + """Metadata pertaining to creation and last modification of the resource. 
+ + :ivar created_by: The identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". + :vartype created_by_type: str or ~azure.mgmt.storage.models.CreatedByType + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource. Known values + are: "User", "Application", "ManagedIdentity", and "Key". + :vartype last_modified_by_type: str or ~azure.mgmt.storage.models.CreatedByType + :ivar last_modified_at: The timestamp of resource last modification (UTC). + :vartype last_modified_at: ~datetime.datetime + """ + + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read", "create", "update", "delete", "query"]) + """The identity that created the resource.""" + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="createdByType", visibility=["read", "create", "update", "delete", "query"] + ) + """The type of identity that created the resource. Known values are: \"User\", \"Application\", + \"ManagedIdentity\", and \"Key\".""" + created_at: Optional[datetime.datetime] = rest_field( + name="createdAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp of resource creation (UTC).""" + last_modified_by: Optional[str] = rest_field( + name="lastModifiedBy", visibility=["read", "create", "update", "delete", "query"] + ) + """The identity that last modified the resource.""" + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="lastModifiedByType", visibility=["read", "create", "update", "delete", "query"] + ) + """The type of identity that last modified the resource. 
Known values are: \"User\", + \"Application\", \"ManagedIdentity\", and \"Key\".""" + last_modified_at: Optional[datetime.datetime] = rest_field( + name="lastModifiedAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp of resource last modification (UTC).""" + + @overload + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Table(ProxyResource): + """Properties of the table, including Id, resource name, resource type. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar table_properties: Table resource properties. 
+ :vartype table_properties: ~azure.mgmt.storage.models.TableProperties + """ + + table_properties: Optional["_models.TableProperties"] = rest_field( + name="properties", visibility=["read", "create", "update", "delete", "query"] + ) + """Table resource properties.""" + + __flattened_items = ["table_name", "signed_identifiers"] + + @overload + def __init__( + self, + *, + table_properties: Optional["_models.TableProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.table_properties is None: + return None + return getattr(self.table_properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.table_properties is None: + self.table_properties = self._attr_to_rest_field["table_properties"]._class_type() + setattr(self.table_properties, key, value) + else: + super().__setattr__(key, value) + + +class TableAccessPolicy(_Model): + """Table Access Policy Properties Object. + + :ivar start_time: Start time of the access policy. + :vartype start_time: ~datetime.datetime + :ivar expiry_time: Expiry time of the access policy. + :vartype expiry_time: ~datetime.datetime + :ivar permission: Required. List of abbreviated permissions. Supported permission values + include 'r','a','u','d'. Required. 
+ :vartype permission: str + """ + + start_time: Optional[datetime.datetime] = rest_field( + name="startTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """Start time of the access policy.""" + expiry_time: Optional[datetime.datetime] = rest_field( + name="expiryTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """Expiry time of the access policy.""" + permission: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required. List of abbreviated permissions. Supported permission values include 'r','a','u','d'. + Required.""" + + @overload + def __init__( + self, + *, + permission: str, + start_time: Optional[datetime.datetime] = None, + expiry_time: Optional[datetime.datetime] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TableProperties(_Model): + """TableProperties. + + :ivar table_name: Table name under the specified account. + :vartype table_name: str + :ivar signed_identifiers: List of stored access policies specified on the table. + :vartype signed_identifiers: list[~azure.mgmt.storage.models.TableSignedIdentifier] + """ + + table_name: Optional[str] = rest_field(name="tableName", visibility=["read"]) + """Table name under the specified account.""" + signed_identifiers: Optional[list["_models.TableSignedIdentifier"]] = rest_field( + name="signedIdentifiers", visibility=["read", "create", "update", "delete", "query"] + ) + """List of stored access policies specified on the table.""" + + @overload + def __init__( + self, + *, + signed_identifiers: Optional[list["_models.TableSignedIdentifier"]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TableServiceProperties(ProxyResource): + """The properties of a storage account’s Table service. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storage.models.SystemData + :ivar table_service_properties: The properties of a storage account’s Table service. + :vartype table_service_properties: ~azure.mgmt.storage.models.TableServicePropertiesProperties + """ + + table_service_properties: Optional["_models.TableServicePropertiesProperties"] = rest_field( + name="properties", visibility=["read", "create", "update", "delete", "query"] + ) + """The properties of a storage account’s Table service.""" + + __flattened_items = ["cors"] + + @overload + def __init__( + self, + *, + table_service_properties: Optional["_models.TableServicePropertiesProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items} + super().__init__(*args, **kwargs) + for k, v in _flattened_input.items(): + setattr(self, k, v) + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.table_service_properties is None: + return None + return getattr(self.table_service_properties, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.table_service_properties is None: + self.table_service_properties = self._attr_to_rest_field["table_service_properties"]._class_type() + setattr(self.table_service_properties, key, value) + else: + super().__setattr__(key, value) + + +class TableServicePropertiesProperties(_Model): + """The properties of a storage account’s Table service. + + :ivar cors: Specifies CORS rules for the Table service. You can include up to five CorsRule + elements in the request. If no CorsRule elements are included in the request body, all CORS + rules will be deleted, and CORS will be disabled for the Table service. + :vartype cors: ~azure.mgmt.storage.models.CorsRules + """ + + cors: Optional["_models.CorsRules"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Specifies CORS rules for the Table service. You can include up to five CorsRule elements in the + request. If no CorsRule elements are included in the request body, all CORS rules will be + deleted, and CORS will be disabled for the Table service.""" + + @overload + def __init__( + self, + *, + cors: Optional["_models.CorsRules"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TableSignedIdentifier(_Model): + """Object to set Table Access Policy. + + :ivar id: unique-64-character-value of the stored access policy. Required. + :vartype id: str + :ivar access_policy: Access policy. + :vartype access_policy: ~azure.mgmt.storage.models.TableAccessPolicy + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """unique-64-character-value of the stored access policy. Required.""" + access_policy: Optional["_models.TableAccessPolicy"] = rest_field( + name="accessPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """Access policy.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + access_policy: Optional["_models.TableAccessPolicy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TagFilter(_Model): + """Blob index tag based filtering for blob objects. + + :ivar name: This is the filter tag name, it can have 1 - 128 characters. Required. + :vartype name: str + :ivar op: This is the comparison operator which is used for object comparison and filtering. + Only == (equality operator) is currently supported. Required. + :vartype op: str + :ivar value: This is the filter tag value field used for tag based filtering, it can have 0 - + 256 characters. Required. + :vartype value: str + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """This is the filter tag name, it can have 1 - 128 characters. 
Required.""" + op: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """This is the comparison operator which is used for object comparison and filtering. Only == + (equality operator) is currently supported. Required.""" + value: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """This is the filter tag value field used for tag based filtering, it can have 0 - 256 + characters. Required.""" + + @overload + def __init__( + self, + *, + name: str, + op: str, + value: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TagProperty(_Model): + """A tag of the LegalHold of a blob container. + + :ivar tag: The tag value. + :vartype tag: str + :ivar timestamp: Returns the date and time the tag was added. + :vartype timestamp: ~datetime.datetime + :ivar object_identifier: Returns the Object ID of the user who added the tag. + :vartype object_identifier: str + :ivar tenant_id: Returns the Tenant ID that issued the token for the user who added the tag. + :vartype tenant_id: str + :ivar upn: Returns the User Principal Name of the user who added the tag. 
+ :vartype upn: str + """ + + tag: Optional[str] = rest_field(visibility=["read"]) + """The tag value.""" + timestamp: Optional[datetime.datetime] = rest_field(visibility=["read"], format="rfc3339") + """Returns the date and time the tag was added.""" + object_identifier: Optional[str] = rest_field(name="objectIdentifier", visibility=["read"]) + """Returns the Object ID of the user who added the tag.""" + tenant_id: Optional[str] = rest_field(name="tenantId", visibility=["read"]) + """Returns the Tenant ID that issued the token for the user who added the tag.""" + upn: Optional[str] = rest_field(visibility=["read"]) + """Returns the User Principal Name of the user who added the tag.""" + + +class TestConnectionResponse(_Model): + """Test connection response properties. + + :ivar storage_connector_method_name: Indicates the method used to validate the connection to + the backing data store. Valid values are ``GetBlob`` and ``ListBlobs`` for failure, and + ``TestExistingConnection`` for success. Required. + :vartype storage_connector_method_name: str + :ivar storage_connector_error_message: A string representing the error received from the + backing data store. Format will vary depending on the data store type and will be capped at 1 + MB in size. The error message will be empty if the connection was successful. + :vartype storage_connector_error_message: str + :ivar storage_connector_request_id: The request Id associated with the request sent to the + backing data store for validation. Required. + :vartype storage_connector_request_id: str + """ + + storage_connector_method_name: str = rest_field( + name="storageConnectorMethodName", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates the method used to validate the connection to the backing data store. Valid values + are ``GetBlob`` and ``ListBlobs`` for failure, and ``TestExistingConnection`` for success. 
+ Required.""" + storage_connector_error_message: Optional[str] = rest_field( + name="storageConnectorErrorMessage", visibility=["read", "create", "update", "delete", "query"] + ) + """A string representing the error received from the backing data store. Format will vary + depending on the data store type and will be capped at 1 MB in size. The error message will be + empty if the connection was successful.""" + storage_connector_request_id: str = rest_field( + name="storageConnectorRequestId", visibility=["read", "create", "update", "delete", "query"] + ) + """The request Id associated with the request sent to the backing data store for validation. + Required.""" + + @overload + def __init__( + self, + *, + storage_connector_method_name: str, + storage_connector_request_id: str, + storage_connector_error_message: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestExistingConnectionRequest(_Model): + """Test existing connection request properties. + + :ivar unique_id: The uniqueId of the storage connector as returned by the server. Required. + :vartype unique_id: str + """ + + unique_id: str = rest_field(name="uniqueId", visibility=["read", "create", "update", "delete", "query"]) + """The uniqueId of the storage connector as returned by the server. Required.""" + + @overload + def __init__( + self, + *, + unique_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TriggerParameters(_Model): + """The trigger parameters update for the storage task assignment execution. + + :ivar start_from: When to start task execution. This is a required field when + ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when + ExecutionTrigger.properties.type is 'RunOnce'. + :vartype start_from: ~datetime.datetime + :ivar interval: Run interval of task execution. This is a required field when + ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when + ExecutionTrigger.properties.type is 'RunOnce'. + :vartype interval: int + :ivar interval_unit: Run interval unit of task execution. This is a required field when + ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when + ExecutionTrigger.properties.type is 'RunOnce'. "Days" + :vartype interval_unit: str or ~azure.mgmt.storage.models.IntervalUnit + :ivar end_by: When to end task execution. This is a required field when + ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when + ExecutionTrigger.properties.type is 'RunOnce'. + :vartype end_by: ~datetime.datetime + :ivar start_on: When to start task execution. This is a required field when + ExecutionTrigger.properties.type is 'RunOnce'; this property should not be present when + ExecutionTrigger.properties.type is 'OnSchedule'. + :vartype start_on: ~datetime.datetime + """ + + start_from: Optional[datetime.datetime] = rest_field( + name="startFrom", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """When to start task execution. 
This is a required field when ExecutionTrigger.properties.type is + 'OnSchedule'; this property should not be present when ExecutionTrigger.properties.type is + 'RunOnce'.""" + interval: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Run interval of task execution. This is a required field when ExecutionTrigger.properties.type + is 'OnSchedule'; this property should not be present when ExecutionTrigger.properties.type is + 'RunOnce'.""" + interval_unit: Optional[Union[str, "_models.IntervalUnit"]] = rest_field( + name="intervalUnit", visibility=["read", "create", "update", "delete", "query"] + ) + """Run interval unit of task execution. This is a required field when + ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when + ExecutionTrigger.properties.type is 'RunOnce'. \"Days\"""" + end_by: Optional[datetime.datetime] = rest_field( + name="endBy", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """When to end task execution. This is a required field when ExecutionTrigger.properties.type is + 'OnSchedule'; this property should not be present when ExecutionTrigger.properties.type is + 'RunOnce'.""" + start_on: Optional[datetime.datetime] = rest_field( + name="startOn", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """When to start task execution. This is a required field when ExecutionTrigger.properties.type is + 'RunOnce'; this property should not be present when ExecutionTrigger.properties.type is + 'OnSchedule'.""" + + @overload + def __init__( + self, + *, + start_from: Optional[datetime.datetime] = None, + interval: Optional[int] = None, + interval_unit: Optional[Union[str, "_models.IntervalUnit"]] = None, + end_by: Optional[datetime.datetime] = None, + start_on: Optional[datetime.datetime] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TriggerParametersUpdate(_Model): + """The trigger parameters update for the storage task assignment execution. + + :ivar start_from: When to start task execution. This is a mutable field when + ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when + ExecutionTrigger.properties.type is 'RunOnce'. + :vartype start_from: ~datetime.datetime + :ivar interval: Run interval of task execution. This is a mutable field when + ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when + ExecutionTrigger.properties.type is 'RunOnce'. + :vartype interval: int + :ivar interval_unit: Run interval unit of task execution. This is a mutable field when + ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when + ExecutionTrigger.properties.type is 'RunOnce'. "Days" + :vartype interval_unit: str or ~azure.mgmt.storage.models.IntervalUnit + :ivar end_by: When to end task execution. This is a mutable field when + ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when + ExecutionTrigger.properties.type is 'RunOnce'. + :vartype end_by: ~datetime.datetime + :ivar start_on: When to start task execution. This is a mutable field when + ExecutionTrigger.properties.type is 'RunOnce'; this property should not be present when + ExecutionTrigger.properties.type is 'OnSchedule'. + :vartype start_on: ~datetime.datetime + """ + + start_from: Optional[datetime.datetime] = rest_field( + name="startFrom", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """When to start task execution. 
This is a mutable field when ExecutionTrigger.properties.type is + 'OnSchedule'; this property should not be present when ExecutionTrigger.properties.type is + 'RunOnce'.""" + interval: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Run interval of task execution. This is a mutable field when ExecutionTrigger.properties.type + is 'OnSchedule'; this property should not be present when ExecutionTrigger.properties.type is + 'RunOnce'.""" + interval_unit: Optional[Union[str, "_models.IntervalUnit"]] = rest_field( + name="intervalUnit", visibility=["read", "create", "update", "delete", "query"] + ) + """Run interval unit of task execution. This is a mutable field when + ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when + ExecutionTrigger.properties.type is 'RunOnce'. \"Days\"""" + end_by: Optional[datetime.datetime] = rest_field( + name="endBy", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """When to end task execution. This is a mutable field when ExecutionTrigger.properties.type is + 'OnSchedule'; this property should not be present when ExecutionTrigger.properties.type is + 'RunOnce'.""" + start_on: Optional[datetime.datetime] = rest_field( + name="startOn", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """When to start task execution. This is a mutable field when ExecutionTrigger.properties.type is + 'RunOnce'; this property should not be present when ExecutionTrigger.properties.type is + 'OnSchedule'.""" + + @overload + def __init__( + self, + *, + start_from: Optional[datetime.datetime] = None, + interval: Optional[int] = None, + interval_unit: Optional[Union[str, "_models.IntervalUnit"]] = None, + end_by: Optional[datetime.datetime] = None, + start_on: Optional[datetime.datetime] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class UpdateHistoryProperty(_Model): + """An update history of the ImmutabilityPolicy of a blob container. + + :ivar update_property: The ImmutabilityPolicy update type of a blob container, possible values + include: put, lock and extend. Known values are: "put", "lock", and "extend". + :vartype update_property: str or ~azure.mgmt.storage.models.ImmutabilityPolicyUpdateType + :ivar immutability_period_since_creation_in_days: The immutability period for the blobs in the + container since the policy creation, in days. + :vartype immutability_period_since_creation_in_days: int + :ivar timestamp: Returns the date and time the ImmutabilityPolicy was updated. + :vartype timestamp: ~datetime.datetime + :ivar object_identifier: Returns the Object ID of the user who updated the ImmutabilityPolicy. + :vartype object_identifier: str + :ivar tenant_id: Returns the Tenant ID that issued the token for the user who updated the + ImmutabilityPolicy. + :vartype tenant_id: str + :ivar upn: Returns the User Principal Name of the user who updated the ImmutabilityPolicy. + :vartype upn: str + :ivar allow_protected_append_writes: This property can only be changed for unlocked time-based + retention policies. When enabled, new blocks can be written to an append blob while maintaining + immutability protection and compliance. Only new blocks can be added and any existing blocks + cannot be modified or deleted. This property cannot be changed with ExtendImmutabilityPolicy + API. + :vartype allow_protected_append_writes: bool + :ivar allow_protected_append_writes_all: This property can only be changed for unlocked + time-based retention policies. 
When enabled, new blocks can be written to both 'Append and Bock + Blobs' while maintaining immutability protection and compliance. Only new blocks can be added + and any existing blocks cannot be modified or deleted. This property cannot be changed with + ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and + 'allowProtectedAppendWritesAll' properties are mutually exclusive. + :vartype allow_protected_append_writes_all: bool + """ + + update_property: Optional[Union[str, "_models.ImmutabilityPolicyUpdateType"]] = rest_field( + name="update", visibility=["read"], original_tsp_name="update" + ) + """The ImmutabilityPolicy update type of a blob container, possible values include: put, lock and + extend. Known values are: \"put\", \"lock\", and \"extend\".""" + immutability_period_since_creation_in_days: Optional[int] = rest_field( + name="immutabilityPeriodSinceCreationInDays", visibility=["read"] + ) + """The immutability period for the blobs in the container since the policy creation, in days.""" + timestamp: Optional[datetime.datetime] = rest_field(visibility=["read"], format="rfc3339") + """Returns the date and time the ImmutabilityPolicy was updated.""" + object_identifier: Optional[str] = rest_field(name="objectIdentifier", visibility=["read"]) + """Returns the Object ID of the user who updated the ImmutabilityPolicy.""" + tenant_id: Optional[str] = rest_field(name="tenantId", visibility=["read"]) + """Returns the Tenant ID that issued the token for the user who updated the ImmutabilityPolicy.""" + upn: Optional[str] = rest_field(visibility=["read"]) + """Returns the User Principal Name of the user who updated the ImmutabilityPolicy.""" + allow_protected_append_writes: Optional[bool] = rest_field( + name="allowProtectedAppendWrites", visibility=["read", "create", "update", "delete", "query"] + ) + """This property can only be changed for unlocked time-based retention policies. 
When enabled, new + blocks can be written to an append blob while maintaining immutability protection and + compliance. Only new blocks can be added and any existing blocks cannot be modified or deleted. + This property cannot be changed with ExtendImmutabilityPolicy API.""" + allow_protected_append_writes_all: Optional[bool] = rest_field( + name="allowProtectedAppendWritesAll", visibility=["read", "create", "update", "delete", "query"] + ) + """This property can only be changed for unlocked time-based retention policies. When enabled, new + blocks can be written to both 'Append and Bock Blobs' while maintaining immutability protection + and compliance. Only new blocks can be added and any existing blocks cannot be modified or + deleted. This property cannot be changed with ExtendImmutabilityPolicy API. The + 'allowProtectedAppendWrites' and 'allowProtectedAppendWritesAll' properties are mutually + exclusive.""" + + @overload + def __init__( + self, + *, + allow_protected_append_writes: Optional[bool] = None, + allow_protected_append_writes_all: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Usage(_Model): + """Describes Storage Resource Usage. + + :ivar unit: Gets the unit of measurement. Known values are: "Count", "Bytes", "Seconds", + "Percent", "CountsPerSecond", and "BytesPerSecond". + :vartype unit: str or ~azure.mgmt.storage.models.UsageUnit + :ivar current_value: Gets the current count of the allocated resources in the subscription. + :vartype current_value: int + :ivar limit: Gets the maximum count of the resources that can be allocated in the subscription. + :vartype limit: int + :ivar name: Gets the name of the type of usage. 
+ :vartype name: ~azure.mgmt.storage.models.UsageName + """ + + unit: Optional[Union[str, "_models.UsageUnit"]] = rest_field(visibility=["read"]) + """Gets the unit of measurement. Known values are: \"Count\", \"Bytes\", \"Seconds\", \"Percent\", + \"CountsPerSecond\", and \"BytesPerSecond\".""" + current_value: Optional[int] = rest_field(name="currentValue", visibility=["read"]) + """Gets the current count of the allocated resources in the subscription.""" + limit: Optional[int] = rest_field(visibility=["read"]) + """Gets the maximum count of the resources that can be allocated in the subscription.""" + name: Optional["_models.UsageName"] = rest_field(visibility=["read"]) + """Gets the name of the type of usage.""" + + +class UsageName(_Model): + """The usage names that can be used; currently limited to StorageAccount. + + :ivar value: Gets a string describing the resource name. + :vartype value: str + :ivar localized_value: Gets a localized string describing the resource name. + :vartype localized_value: str + """ + + value: Optional[str] = rest_field(visibility=["read"]) + """Gets a string describing the resource name.""" + localized_value: Optional[str] = rest_field(name="localizedValue", visibility=["read"]) + """Gets a localized string describing the resource name.""" + + +class UserAssignedIdentity(_Model): + """UserAssignedIdentity for the resource. + + :ivar principal_id: The principal ID of the identity. + :vartype principal_id: str + :ivar client_id: The client ID of the identity. + :vartype client_id: str + """ + + principal_id: Optional[str] = rest_field(name="principalId", visibility=["read"]) + """The principal ID of the identity.""" + client_id: Optional[str] = rest_field(name="clientId", visibility=["read"]) + """The client ID of the identity.""" + + +class VirtualNetworkRule(_Model): + """Virtual Network rule. 
+ + :ivar virtual_network_resource_id: Resource ID of a subnet, for example: + /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. + Required. + :vartype virtual_network_resource_id: str + :ivar action: The action of virtual network rule. Default value is "Allow". + :vartype action: str + :ivar state: Gets the state of virtual network rule. Known values are: "Provisioning", + "Deprovisioning", "Succeeded", "Failed", and "NetworkSourceDeleted". + :vartype state: str or ~azure.mgmt.storage.models.State + """ + + virtual_network_resource_id: str = rest_field(name="id", visibility=["read", "create", "update", "delete", "query"]) + """Resource ID of a subnet, for example: + /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. + Required.""" + action: Optional[Literal["Allow"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The action of virtual network rule. Default value is \"Allow\".""" + state: Optional[Union[str, "_models.State"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Gets the state of virtual network rule. Known values are: \"Provisioning\", \"Deprovisioning\", + \"Succeeded\", \"Failed\", and \"NetworkSourceDeleted\".""" + + @overload + def __init__( + self, + *, + virtual_network_resource_id: str, + action: Optional[Literal["Allow"]] = None, + state: Optional[Union[str, "_models.State"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_models_py3.py deleted file mode 100644 index 8f9b8e17cc4f..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_models_py3.py +++ /dev/null @@ -1,10726 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from collections.abc import MutableMapping -import datetime -from typing import Any, Literal, Optional, TYPE_CHECKING, Union - -from .._utils import serialization as _serialization - -if TYPE_CHECKING: - from .. import models as _models -JSON = MutableMapping[str, Any] - - -class AccessPolicy(_serialization.Model): - """AccessPolicy. - - :ivar start_time: Start time of the access policy. - :vartype start_time: ~datetime.datetime - :ivar expiry_time: Expiry time of the access policy. - :vartype expiry_time: ~datetime.datetime - :ivar permission: List of abbreviated permissions. 
- :vartype permission: str - """ - - _attribute_map = { - "start_time": {"key": "startTime", "type": "iso-8601"}, - "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, - "permission": {"key": "permission", "type": "str"}, - } - - def __init__( - self, - *, - start_time: Optional[datetime.datetime] = None, - expiry_time: Optional[datetime.datetime] = None, - permission: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword start_time: Start time of the access policy. - :paramtype start_time: ~datetime.datetime - :keyword expiry_time: Expiry time of the access policy. - :paramtype expiry_time: ~datetime.datetime - :keyword permission: List of abbreviated permissions. - :paramtype permission: str - """ - super().__init__(**kwargs) - self.start_time = start_time - self.expiry_time = expiry_time - self.permission = permission - - -class AccountImmutabilityPolicyProperties(_serialization.Model): - """This defines account-level immutability policy properties. - - :ivar immutability_period_since_creation_in_days: The immutability period for the blobs in the - container since the policy creation, in days. - :vartype immutability_period_since_creation_in_days: int - :ivar state: The ImmutabilityPolicy state defines the mode of the policy. Disabled state - disables the policy, Unlocked state allows increase and decrease of immutability retention time - and also allows toggling allowProtectedAppendWrites property, Locked state only allows the - increase of the immutability retention time. A policy can only be created in a Disabled or - Unlocked state and can be toggled between the two states. Only a policy in an Unlocked state - can transition to a Locked state which cannot be reverted. Known values are: "Unlocked", - "Locked", and "Disabled". - :vartype state: str or ~azure.mgmt.storage.models.AccountImmutabilityPolicyState - :ivar allow_protected_append_writes: This property can only be changed for disabled and - unlocked time-based retention policies. 
When enabled, new blocks can be written to an append - blob while maintaining immutability protection and compliance. Only new blocks can be added and - any existing blocks cannot be modified or deleted. - :vartype allow_protected_append_writes: bool - """ - - _validation = { - "immutability_period_since_creation_in_days": {"maximum": 146000, "minimum": 1}, - } - - _attribute_map = { - "immutability_period_since_creation_in_days": {"key": "immutabilityPeriodSinceCreationInDays", "type": "int"}, - "state": {"key": "state", "type": "str"}, - "allow_protected_append_writes": {"key": "allowProtectedAppendWrites", "type": "bool"}, - } - - def __init__( - self, - *, - immutability_period_since_creation_in_days: Optional[int] = None, - state: Optional[Union[str, "_models.AccountImmutabilityPolicyState"]] = None, - allow_protected_append_writes: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword immutability_period_since_creation_in_days: The immutability period for the blobs in - the container since the policy creation, in days. - :paramtype immutability_period_since_creation_in_days: int - :keyword state: The ImmutabilityPolicy state defines the mode of the policy. Disabled state - disables the policy, Unlocked state allows increase and decrease of immutability retention time - and also allows toggling allowProtectedAppendWrites property, Locked state only allows the - increase of the immutability retention time. A policy can only be created in a Disabled or - Unlocked state and can be toggled between the two states. Only a policy in an Unlocked state - can transition to a Locked state which cannot be reverted. Known values are: "Unlocked", - "Locked", and "Disabled". - :paramtype state: str or ~azure.mgmt.storage.models.AccountImmutabilityPolicyState - :keyword allow_protected_append_writes: This property can only be changed for disabled and - unlocked time-based retention policies. 
When enabled, new blocks can be written to an append - blob while maintaining immutability protection and compliance. Only new blocks can be added and - any existing blocks cannot be modified or deleted. - :paramtype allow_protected_append_writes: bool - """ - super().__init__(**kwargs) - self.immutability_period_since_creation_in_days = immutability_period_since_creation_in_days - self.state = state - self.allow_protected_append_writes = allow_protected_append_writes - - -class AccountLimits(_serialization.Model): - """Maximum provisioned storage, IOPS, bandwidth and number of file shares limits for the storage - account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar max_file_shares: The maximum number of file shares limit for the storage account. - :vartype max_file_shares: int - :ivar max_provisioned_storage_gi_b: The maximum provisioned storage quota limit in gibibytes - for the storage account. - :vartype max_provisioned_storage_gi_b: int - :ivar max_provisioned_iops: The maximum provisioned IOPS limit for the storage account. - :vartype max_provisioned_iops: int - :ivar max_provisioned_bandwidth_mi_b_per_sec: The maximum provisioned bandwidth limit in - mebibytes per second for the storage account. 
- :vartype max_provisioned_bandwidth_mi_b_per_sec: int - """ - - _validation = { - "max_file_shares": {"readonly": True}, - "max_provisioned_storage_gi_b": {"readonly": True}, - "max_provisioned_iops": {"readonly": True}, - "max_provisioned_bandwidth_mi_b_per_sec": {"readonly": True}, - } - - _attribute_map = { - "max_file_shares": {"key": "maxFileShares", "type": "int"}, - "max_provisioned_storage_gi_b": {"key": "maxProvisionedStorageGiB", "type": "int"}, - "max_provisioned_iops": {"key": "maxProvisionedIOPS", "type": "int"}, - "max_provisioned_bandwidth_mi_b_per_sec": {"key": "maxProvisionedBandwidthMiBPerSec", "type": "int"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.max_file_shares: Optional[int] = None - self.max_provisioned_storage_gi_b: Optional[int] = None - self.max_provisioned_iops: Optional[int] = None - self.max_provisioned_bandwidth_mi_b_per_sec: Optional[int] = None - - -class AccountSasParameters(_serialization.Model): - """The parameters to list SAS credentials of a storage account. - - All required parameters must be populated in order to send to server. - - :ivar services: The signed services accessible with the account SAS. Possible values include: - Blob (b), Queue (q), Table (t), File (f). Required. Known values are: "b", "q", "t", and "f". - :vartype services: str or ~azure.mgmt.storage.models.Services - :ivar resource_types: The signed resource types that are accessible with the account SAS. - Service (s): Access to service-level APIs; Container (c): Access to container-level APIs; - Object (o): Access to object-level APIs for blobs, queue messages, table entities, and files. - Required. Known values are: "s", "c", and "o". - :vartype resource_types: str or ~azure.mgmt.storage.models.SignedResourceTypes - :ivar permissions: The signed permissions for the account SAS. Possible values include: Read - (r), Write (w), Delete (d), List (l), Add (a), Create (c), Update (u) and Process (p). 
- Required. Known values are: "r", "d", "w", "l", "a", "c", "u", and "p". - :vartype permissions: str or ~azure.mgmt.storage.models.Permissions - :ivar ip_address_or_range: An IP address or a range of IP addresses from which to accept - requests. - :vartype ip_address_or_range: str - :ivar protocols: The protocol permitted for a request made with the account SAS. Known values - are: "https,http" and "https". - :vartype protocols: str or ~azure.mgmt.storage.models.HttpProtocol - :ivar shared_access_start_time: The time at which the SAS becomes valid. - :vartype shared_access_start_time: ~datetime.datetime - :ivar shared_access_expiry_time: The time at which the shared access signature becomes invalid. - Required. - :vartype shared_access_expiry_time: ~datetime.datetime - :ivar key_to_sign: The key to sign the account SAS token with. - :vartype key_to_sign: str - """ - - _validation = { - "services": {"required": True}, - "resource_types": {"required": True}, - "permissions": {"required": True}, - "shared_access_expiry_time": {"required": True}, - } - - _attribute_map = { - "services": {"key": "signedServices", "type": "str"}, - "resource_types": {"key": "signedResourceTypes", "type": "str"}, - "permissions": {"key": "signedPermission", "type": "str"}, - "ip_address_or_range": {"key": "signedIp", "type": "str"}, - "protocols": {"key": "signedProtocol", "type": "str"}, - "shared_access_start_time": {"key": "signedStart", "type": "iso-8601"}, - "shared_access_expiry_time": {"key": "signedExpiry", "type": "iso-8601"}, - "key_to_sign": {"key": "keyToSign", "type": "str"}, - } - - def __init__( - self, - *, - services: Union[str, "_models.Services"], - resource_types: Union[str, "_models.SignedResourceTypes"], - permissions: Union[str, "_models.Permissions"], - shared_access_expiry_time: datetime.datetime, - ip_address_or_range: Optional[str] = None, - protocols: Optional[Union[str, "_models.HttpProtocol"]] = None, - shared_access_start_time: Optional[datetime.datetime] = 
None, - key_to_sign: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword services: The signed services accessible with the account SAS. Possible values - include: Blob (b), Queue (q), Table (t), File (f). Required. Known values are: "b", "q", "t", - and "f". - :paramtype services: str or ~azure.mgmt.storage.models.Services - :keyword resource_types: The signed resource types that are accessible with the account SAS. - Service (s): Access to service-level APIs; Container (c): Access to container-level APIs; - Object (o): Access to object-level APIs for blobs, queue messages, table entities, and files. - Required. Known values are: "s", "c", and "o". - :paramtype resource_types: str or ~azure.mgmt.storage.models.SignedResourceTypes - :keyword permissions: The signed permissions for the account SAS. Possible values include: Read - (r), Write (w), Delete (d), List (l), Add (a), Create (c), Update (u) and Process (p). - Required. Known values are: "r", "d", "w", "l", "a", "c", "u", and "p". - :paramtype permissions: str or ~azure.mgmt.storage.models.Permissions - :keyword ip_address_or_range: An IP address or a range of IP addresses from which to accept - requests. - :paramtype ip_address_or_range: str - :keyword protocols: The protocol permitted for a request made with the account SAS. Known - values are: "https,http" and "https". - :paramtype protocols: str or ~azure.mgmt.storage.models.HttpProtocol - :keyword shared_access_start_time: The time at which the SAS becomes valid. - :paramtype shared_access_start_time: ~datetime.datetime - :keyword shared_access_expiry_time: The time at which the shared access signature becomes - invalid. Required. - :paramtype shared_access_expiry_time: ~datetime.datetime - :keyword key_to_sign: The key to sign the account SAS token with. 
- :paramtype key_to_sign: str - """ - super().__init__(**kwargs) - self.services = services - self.resource_types = resource_types - self.permissions = permissions - self.ip_address_or_range = ip_address_or_range - self.protocols = protocols - self.shared_access_start_time = shared_access_start_time - self.shared_access_expiry_time = shared_access_expiry_time - self.key_to_sign = key_to_sign - - -class AccountUsage(_serialization.Model): - """Usage of provisioned storage, IOPS, bandwidth and number of file shares across all live shares - and soft-deleted shares in the account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar live_shares: Usage of provisioned storage, IOPS, bandwidth and number of file shares - across all live shares or soft-deleted shares in the account. - :vartype live_shares: ~azure.mgmt.storage.models.AccountUsageElements - :ivar soft_deleted_shares: Usage of provisioned storage, IOPS, bandwidth and number of file - shares across all live shares or soft-deleted shares in the account. - :vartype soft_deleted_shares: ~azure.mgmt.storage.models.AccountUsageElements - """ - - _validation = { - "live_shares": {"readonly": True}, - "soft_deleted_shares": {"readonly": True}, - } - - _attribute_map = { - "live_shares": {"key": "liveShares", "type": "AccountUsageElements"}, - "soft_deleted_shares": {"key": "softDeletedShares", "type": "AccountUsageElements"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.live_shares: Optional["_models.AccountUsageElements"] = None - self.soft_deleted_shares: Optional["_models.AccountUsageElements"] = None - - -class AccountUsageElements(_serialization.Model): - """Usage of provisioned storage, IOPS, bandwidth and number of file shares across all live shares - or soft-deleted shares in the account. - - Variables are only populated by the server, and will be ignored when sending a request. 
- - :ivar file_share_count: The total number of file shares. - :vartype file_share_count: int - :ivar provisioned_storage_gi_b: The total provisioned storage quota in gibibytes. - :vartype provisioned_storage_gi_b: int - :ivar provisioned_iops: The total provisioned IOPS. - :vartype provisioned_iops: int - :ivar provisioned_bandwidth_mi_b_per_sec: The total provisioned bandwidth in mebibytes per - second. - :vartype provisioned_bandwidth_mi_b_per_sec: int - """ - - _validation = { - "file_share_count": {"readonly": True}, - "provisioned_storage_gi_b": {"readonly": True}, - "provisioned_iops": {"readonly": True}, - "provisioned_bandwidth_mi_b_per_sec": {"readonly": True}, - } - - _attribute_map = { - "file_share_count": {"key": "fileShareCount", "type": "int"}, - "provisioned_storage_gi_b": {"key": "provisionedStorageGiB", "type": "int"}, - "provisioned_iops": {"key": "provisionedIOPS", "type": "int"}, - "provisioned_bandwidth_mi_b_per_sec": {"key": "provisionedBandwidthMiBPerSec", "type": "int"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.file_share_count: Optional[int] = None - self.provisioned_storage_gi_b: Optional[int] = None - self.provisioned_iops: Optional[int] = None - self.provisioned_bandwidth_mi_b_per_sec: Optional[int] = None - - -class ActiveDirectoryProperties(_serialization.Model): - """Settings properties for Active Directory (AD). - - :ivar domain_name: Specifies the primary domain that the AD DNS server is authoritative for. - This property is required if directoryServiceOptions is set to AD (AD DS authentication). If - directoryServiceOptions is set to AADDS (Entra DS authentication), providing this property is - optional, as it will be inferred automatically if omitted. 
If directoryServiceOptions is set to - AADKERB (Entra authentication), this property is optional; it is needed to support - configuration of directory- and file-level permissions via Windows File Explorer, but is not - required for authentication. - :vartype domain_name: str - :ivar net_bios_domain_name: Specifies the NetBIOS domain name. If directoryServiceOptions is - set to AD (AD DS authentication), this property is required. Otherwise, it can be omitted. - :vartype net_bios_domain_name: str - :ivar forest_name: Specifies the Active Directory forest to get. If directoryServiceOptions is - set to AD (AD DS authentication), this property is required. Otherwise, it can be omitted. - :vartype forest_name: str - :ivar domain_guid: Specifies the domain GUID. If directoryServiceOptions is set to AD (AD DS - authentication), this property is required. If directoryServiceOptions is set to AADDS (Entra - DS authentication), this property can be omitted. If directoryServiceOptions is set to AADKERB - (Entra authentication), this property is optional; it is needed to support configuration of - directory- and file-level permissions via Windows File Explorer, but is not required for - authentication. - :vartype domain_guid: str - :ivar domain_sid: Specifies the security identifier (SID) of the AD domain. If - directoryServiceOptions is set to AD (AD DS authentication), this property is required. - Otherwise, it can be omitted. - :vartype domain_sid: str - :ivar azure_storage_sid: Specifies the security identifier (SID) for Azure Storage. If - directoryServiceOptions is set to AD (AD DS authentication), this property is required. - Otherwise, it can be omitted. - :vartype azure_storage_sid: str - :ivar sam_account_name: Specifies the Active Directory SAMAccountName for Azure Storage. If - directoryServiceOptions is set to AD (AD DS authentication), this property is optional. If - provided, accountType should also be provided. 
For directoryServiceOptions AADDS (Entra DS - authentication) or AADKERB (Entra authentication), this property can be omitted. - :vartype sam_account_name: str - :ivar account_type: Specifies the Active Directory account type for Azure Storage. If - directoryServiceOptions is set to AD (AD DS authentication), this property is optional. If - provided, samAccountName should also be provided. For directoryServiceOptions AADDS (Entra DS - authentication) or AADKERB (Entra authentication), this property can be omitted. Known values - are: "User" and "Computer". - :vartype account_type: str or ~azure.mgmt.storage.models.AccountType - """ - - _attribute_map = { - "domain_name": {"key": "domainName", "type": "str"}, - "net_bios_domain_name": {"key": "netBiosDomainName", "type": "str"}, - "forest_name": {"key": "forestName", "type": "str"}, - "domain_guid": {"key": "domainGuid", "type": "str"}, - "domain_sid": {"key": "domainSid", "type": "str"}, - "azure_storage_sid": {"key": "azureStorageSid", "type": "str"}, - "sam_account_name": {"key": "samAccountName", "type": "str"}, - "account_type": {"key": "accountType", "type": "str"}, - } - - def __init__( - self, - *, - domain_name: Optional[str] = None, - net_bios_domain_name: Optional[str] = None, - forest_name: Optional[str] = None, - domain_guid: Optional[str] = None, - domain_sid: Optional[str] = None, - azure_storage_sid: Optional[str] = None, - sam_account_name: Optional[str] = None, - account_type: Optional[Union[str, "_models.AccountType"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword domain_name: Specifies the primary domain that the AD DNS server is authoritative for. - This property is required if directoryServiceOptions is set to AD (AD DS authentication). If - directoryServiceOptions is set to AADDS (Entra DS authentication), providing this property is - optional, as it will be inferred automatically if omitted. 
If directoryServiceOptions is set to - AADKERB (Entra authentication), this property is optional; it is needed to support - configuration of directory- and file-level permissions via Windows File Explorer, but is not - required for authentication. - :paramtype domain_name: str - :keyword net_bios_domain_name: Specifies the NetBIOS domain name. If directoryServiceOptions is - set to AD (AD DS authentication), this property is required. Otherwise, it can be omitted. - :paramtype net_bios_domain_name: str - :keyword forest_name: Specifies the Active Directory forest to get. If directoryServiceOptions - is set to AD (AD DS authentication), this property is required. Otherwise, it can be omitted. - :paramtype forest_name: str - :keyword domain_guid: Specifies the domain GUID. If directoryServiceOptions is set to AD (AD DS - authentication), this property is required. If directoryServiceOptions is set to AADDS (Entra - DS authentication), this property can be omitted. If directoryServiceOptions is set to AADKERB - (Entra authentication), this property is optional; it is needed to support configuration of - directory- and file-level permissions via Windows File Explorer, but is not required for - authentication. - :paramtype domain_guid: str - :keyword domain_sid: Specifies the security identifier (SID) of the AD domain. If - directoryServiceOptions is set to AD (AD DS authentication), this property is required. - Otherwise, it can be omitted. - :paramtype domain_sid: str - :keyword azure_storage_sid: Specifies the security identifier (SID) for Azure Storage. If - directoryServiceOptions is set to AD (AD DS authentication), this property is required. - Otherwise, it can be omitted. - :paramtype azure_storage_sid: str - :keyword sam_account_name: Specifies the Active Directory SAMAccountName for Azure Storage. If - directoryServiceOptions is set to AD (AD DS authentication), this property is optional. If - provided, accountType should also be provided. 
For directoryServiceOptions AADDS (Entra DS - authentication) or AADKERB (Entra authentication), this property can be omitted. - :paramtype sam_account_name: str - :keyword account_type: Specifies the Active Directory account type for Azure Storage. If - directoryServiceOptions is set to AD (AD DS authentication), this property is optional. If - provided, samAccountName should also be provided. For directoryServiceOptions AADDS (Entra DS - authentication) or AADKERB (Entra authentication), this property can be omitted. Known values - are: "User" and "Computer". - :paramtype account_type: str or ~azure.mgmt.storage.models.AccountType - """ - super().__init__(**kwargs) - self.domain_name = domain_name - self.net_bios_domain_name = net_bios_domain_name - self.forest_name = forest_name - self.domain_guid = domain_guid - self.domain_sid = domain_sid - self.azure_storage_sid = azure_storage_sid - self.sam_account_name = sam_account_name - self.account_type = account_type - - -class Resource(_serialization.Model): - """Common fields that are returned in the response for all Azure Resource Manager resources. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". 
- :vartype type: str - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.id: Optional[str] = None - self.name: Optional[str] = None - self.type: Optional[str] = None - - -class AzureEntityResource(Resource): - """The resource model definition for an Azure Resource Manager resource with an etag. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar etag: Resource Etag. - :vartype etag: str - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "etag": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "etag": {"key": "etag", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.etag: Optional[str] = None - - -class AzureFilesIdentityBasedAuthentication(_serialization.Model): - """Settings for Azure Files identity based authentication. - - All required parameters must be populated in order to send to server. - - :ivar directory_service_options: Indicates the directory service used. Note that this enum may - be extended in the future. Required. 
Known values are: "None", "AADDS", "AD", and "AADKERB". - :vartype directory_service_options: str or ~azure.mgmt.storage.models.DirectoryServiceOptions - :ivar active_directory_properties: Additional information about the directory service. Required - if directoryServiceOptions is AD (AD DS authentication). Optional for directoryServiceOptions - AADDS (Entra DS authentication) and AADKERB (Entra authentication). - :vartype active_directory_properties: ~azure.mgmt.storage.models.ActiveDirectoryProperties - :ivar default_share_permission: Default share permission for users using Kerberos - authentication if RBAC role is not assigned. Known values are: "None", - "StorageFileDataSmbShareReader", "StorageFileDataSmbShareContributor", and - "StorageFileDataSmbShareElevatedContributor". - :vartype default_share_permission: str or ~azure.mgmt.storage.models.DefaultSharePermission - :ivar smb_o_auth_settings: Required for Managed Identities access using OAuth over SMB. - :vartype smb_o_auth_settings: ~azure.mgmt.storage.models.SmbOAuthSettings - """ - - _validation = { - "directory_service_options": {"required": True}, - } - - _attribute_map = { - "directory_service_options": {"key": "directoryServiceOptions", "type": "str"}, - "active_directory_properties": {"key": "activeDirectoryProperties", "type": "ActiveDirectoryProperties"}, - "default_share_permission": {"key": "defaultSharePermission", "type": "str"}, - "smb_o_auth_settings": {"key": "smbOAuthSettings", "type": "SmbOAuthSettings"}, - } - - def __init__( - self, - *, - directory_service_options: Union[str, "_models.DirectoryServiceOptions"], - active_directory_properties: Optional["_models.ActiveDirectoryProperties"] = None, - default_share_permission: Optional[Union[str, "_models.DefaultSharePermission"]] = None, - smb_o_auth_settings: Optional["_models.SmbOAuthSettings"] = None, - **kwargs: Any - ) -> None: - """ - :keyword directory_service_options: Indicates the directory service used. 
Note that this enum - may be extended in the future. Required. Known values are: "None", "AADDS", "AD", and - "AADKERB". - :paramtype directory_service_options: str or ~azure.mgmt.storage.models.DirectoryServiceOptions - :keyword active_directory_properties: Additional information about the directory service. - Required if directoryServiceOptions is AD (AD DS authentication). Optional for - directoryServiceOptions AADDS (Entra DS authentication) and AADKERB (Entra authentication). - :paramtype active_directory_properties: ~azure.mgmt.storage.models.ActiveDirectoryProperties - :keyword default_share_permission: Default share permission for users using Kerberos - authentication if RBAC role is not assigned. Known values are: "None", - "StorageFileDataSmbShareReader", "StorageFileDataSmbShareContributor", and - "StorageFileDataSmbShareElevatedContributor". - :paramtype default_share_permission: str or ~azure.mgmt.storage.models.DefaultSharePermission - :keyword smb_o_auth_settings: Required for Managed Identities access using OAuth over SMB. - :paramtype smb_o_auth_settings: ~azure.mgmt.storage.models.SmbOAuthSettings - """ - super().__init__(**kwargs) - self.directory_service_options = directory_service_options - self.active_directory_properties = active_directory_properties - self.default_share_permission = default_share_permission - self.smb_o_auth_settings = smb_o_auth_settings - - -class BlobContainer(AzureEntityResource): - """Properties of the blob container, including Id, resource name, resource type, Etag. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar etag: Resource Etag. - :vartype etag: str - :ivar version: The version of the deleted blob container. - :vartype version: str - :ivar deleted: Indicates whether the blob container was deleted. - :vartype deleted: bool - :ivar deleted_time: Blob container deletion time. - :vartype deleted_time: ~datetime.datetime - :ivar remaining_retention_days: Remaining retention days for soft deleted blob container. - :vartype remaining_retention_days: int - :ivar default_encryption_scope: Default the container to use specified encryption scope for all - writes. - :vartype default_encryption_scope: str - :ivar deny_encryption_scope_override: Block override of encryption scope from the container - default. - :vartype deny_encryption_scope_override: bool - :ivar public_access: Specifies whether data in the container may be accessed publicly and the - level of access. Known values are: "Container", "Blob", and "None". - :vartype public_access: str or ~azure.mgmt.storage.models.PublicAccess - :ivar last_modified_time: Returns the date and time the container was last modified. - :vartype last_modified_time: ~datetime.datetime - :ivar lease_status: The lease status of the container. Known values are: "Locked" and - "Unlocked". - :vartype lease_status: str or ~azure.mgmt.storage.models.LeaseStatus - :ivar lease_state: Lease state of the container. Known values are: "Available", "Leased", - "Expired", "Breaking", and "Broken". - :vartype lease_state: str or ~azure.mgmt.storage.models.LeaseState - :ivar lease_duration: Specifies whether the lease on a container is of infinite or fixed - duration, only when the container is leased. Known values are: "Infinite" and "Fixed". - :vartype lease_duration: str or ~azure.mgmt.storage.models.LeaseDuration - :ivar metadata: A name-value pair to associate with the container as metadata. 
- :vartype metadata: dict[str, str] - :ivar immutability_policy: The ImmutabilityPolicy property of the container. - :vartype immutability_policy: ~azure.mgmt.storage.models.ImmutabilityPolicyProperties - :ivar legal_hold: The LegalHold property of the container. - :vartype legal_hold: ~azure.mgmt.storage.models.LegalHoldProperties - :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there are at - least one existing tag. The hasLegalHold public property is set to false by SRP if all existing - legal hold tags are cleared out. There can be a maximum of 1000 blob containers with - hasLegalHold=true for a given account. - :vartype has_legal_hold: bool - :ivar has_immutability_policy: The hasImmutabilityPolicy public property is set to true by SRP - if ImmutabilityPolicy has been created for this container. The hasImmutabilityPolicy public - property is set to false by SRP if ImmutabilityPolicy has not been created for this container. - :vartype has_immutability_policy: bool - :ivar immutable_storage_with_versioning: The object level immutability property of the - container. The property is immutable and can only be set to true at the container creation - time. Existing containers must undergo a migration process. - :vartype immutable_storage_with_versioning: - ~azure.mgmt.storage.models.ImmutableStorageWithVersioning - :ivar enable_nfs_v3_root_squash: Enable NFSv3 root squash on blob container. - :vartype enable_nfs_v3_root_squash: bool - :ivar enable_nfs_v3_all_squash: Enable NFSv3 all squash on blob container. 
- :vartype enable_nfs_v3_all_squash: bool - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "etag": {"readonly": True}, - "version": {"readonly": True}, - "deleted": {"readonly": True}, - "deleted_time": {"readonly": True}, - "remaining_retention_days": {"readonly": True}, - "last_modified_time": {"readonly": True}, - "lease_status": {"readonly": True}, - "lease_state": {"readonly": True}, - "lease_duration": {"readonly": True}, - "immutability_policy": {"readonly": True}, - "legal_hold": {"readonly": True}, - "has_legal_hold": {"readonly": True}, - "has_immutability_policy": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "etag": {"key": "etag", "type": "str"}, - "version": {"key": "properties.version", "type": "str"}, - "deleted": {"key": "properties.deleted", "type": "bool"}, - "deleted_time": {"key": "properties.deletedTime", "type": "iso-8601"}, - "remaining_retention_days": {"key": "properties.remainingRetentionDays", "type": "int"}, - "default_encryption_scope": {"key": "properties.defaultEncryptionScope", "type": "str"}, - "deny_encryption_scope_override": {"key": "properties.denyEncryptionScopeOverride", "type": "bool"}, - "public_access": {"key": "properties.publicAccess", "type": "str"}, - "last_modified_time": {"key": "properties.lastModifiedTime", "type": "iso-8601"}, - "lease_status": {"key": "properties.leaseStatus", "type": "str"}, - "lease_state": {"key": "properties.leaseState", "type": "str"}, - "lease_duration": {"key": "properties.leaseDuration", "type": "str"}, - "metadata": {"key": "properties.metadata", "type": "{str}"}, - "immutability_policy": {"key": "properties.immutabilityPolicy", "type": "ImmutabilityPolicyProperties"}, - "legal_hold": {"key": "properties.legalHold", "type": "LegalHoldProperties"}, - "has_legal_hold": {"key": 
"properties.hasLegalHold", "type": "bool"}, - "has_immutability_policy": {"key": "properties.hasImmutabilityPolicy", "type": "bool"}, - "immutable_storage_with_versioning": { - "key": "properties.immutableStorageWithVersioning", - "type": "ImmutableStorageWithVersioning", - }, - "enable_nfs_v3_root_squash": {"key": "properties.enableNfsV3RootSquash", "type": "bool"}, - "enable_nfs_v3_all_squash": {"key": "properties.enableNfsV3AllSquash", "type": "bool"}, - } - - def __init__( - self, - *, - default_encryption_scope: Optional[str] = None, - deny_encryption_scope_override: Optional[bool] = None, - public_access: Optional[Union[str, "_models.PublicAccess"]] = None, - metadata: Optional[dict[str, str]] = None, - immutable_storage_with_versioning: Optional["_models.ImmutableStorageWithVersioning"] = None, - enable_nfs_v3_root_squash: Optional[bool] = None, - enable_nfs_v3_all_squash: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword default_encryption_scope: Default the container to use specified encryption scope for - all writes. - :paramtype default_encryption_scope: str - :keyword deny_encryption_scope_override: Block override of encryption scope from the container - default. - :paramtype deny_encryption_scope_override: bool - :keyword public_access: Specifies whether data in the container may be accessed publicly and - the level of access. Known values are: "Container", "Blob", and "None". - :paramtype public_access: str or ~azure.mgmt.storage.models.PublicAccess - :keyword metadata: A name-value pair to associate with the container as metadata. - :paramtype metadata: dict[str, str] - :keyword immutable_storage_with_versioning: The object level immutability property of the - container. The property is immutable and can only be set to true at the container creation - time. Existing containers must undergo a migration process. 
- :paramtype immutable_storage_with_versioning: - ~azure.mgmt.storage.models.ImmutableStorageWithVersioning - :keyword enable_nfs_v3_root_squash: Enable NFSv3 root squash on blob container. - :paramtype enable_nfs_v3_root_squash: bool - :keyword enable_nfs_v3_all_squash: Enable NFSv3 all squash on blob container. - :paramtype enable_nfs_v3_all_squash: bool - """ - super().__init__(**kwargs) - self.version: Optional[str] = None - self.deleted: Optional[bool] = None - self.deleted_time: Optional[datetime.datetime] = None - self.remaining_retention_days: Optional[int] = None - self.default_encryption_scope = default_encryption_scope - self.deny_encryption_scope_override = deny_encryption_scope_override - self.public_access = public_access - self.last_modified_time: Optional[datetime.datetime] = None - self.lease_status: Optional[Union[str, "_models.LeaseStatus"]] = None - self.lease_state: Optional[Union[str, "_models.LeaseState"]] = None - self.lease_duration: Optional[Union[str, "_models.LeaseDuration"]] = None - self.metadata = metadata - self.immutability_policy: Optional["_models.ImmutabilityPolicyProperties"] = None - self.legal_hold: Optional["_models.LegalHoldProperties"] = None - self.has_legal_hold: Optional[bool] = None - self.has_immutability_policy: Optional[bool] = None - self.immutable_storage_with_versioning = immutable_storage_with_versioning - self.enable_nfs_v3_root_squash = enable_nfs_v3_root_squash - self.enable_nfs_v3_all_squash = enable_nfs_v3_all_squash - - -class BlobInventoryCreationTime(_serialization.Model): - """This property defines the creation time based filtering condition. Blob Inventory schema - parameter 'Creation-Time' is mandatory with this filter. - - :ivar last_n_days: When set the policy filters the objects that are created in the last N days. - Where N is an integer value between 1 to 36500. 
- :vartype last_n_days: int - """ - - _validation = { - "last_n_days": {"maximum": 36500, "minimum": 1}, - } - - _attribute_map = { - "last_n_days": {"key": "lastNDays", "type": "int"}, - } - - def __init__(self, *, last_n_days: Optional[int] = None, **kwargs: Any) -> None: - """ - :keyword last_n_days: When set the policy filters the objects that are created in the last N - days. Where N is an integer value between 1 to 36500. - :paramtype last_n_days: int - """ - super().__init__(**kwargs) - self.last_n_days = last_n_days - - -class BlobInventoryPolicy(Resource): - """The storage account blob inventory policy. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Metadata pertaining to creation and last modification of the resource. - :vartype system_data: ~azure.mgmt.storage.models.SystemData - :ivar last_modified_time: Returns the last modified date and time of the blob inventory policy. - :vartype last_modified_time: ~datetime.datetime - :ivar policy: The storage account blob inventory policy object. It is composed of policy rules. 
- :vartype policy: ~azure.mgmt.storage.models.BlobInventoryPolicySchema - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "system_data": {"readonly": True}, - "last_modified_time": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, - "last_modified_time": {"key": "properties.lastModifiedTime", "type": "iso-8601"}, - "policy": {"key": "properties.policy", "type": "BlobInventoryPolicySchema"}, - } - - def __init__(self, *, policy: Optional["_models.BlobInventoryPolicySchema"] = None, **kwargs: Any) -> None: - """ - :keyword policy: The storage account blob inventory policy object. It is composed of policy - rules. - :paramtype policy: ~azure.mgmt.storage.models.BlobInventoryPolicySchema - """ - super().__init__(**kwargs) - self.system_data: Optional["_models.SystemData"] = None - self.last_modified_time: Optional[datetime.datetime] = None - self.policy = policy - - -class BlobInventoryPolicyDefinition(_serialization.Model): - """An object that defines the blob inventory rule. - - All required parameters must be populated in order to send to server. - - :ivar filters: An object that defines the filter set. - :vartype filters: ~azure.mgmt.storage.models.BlobInventoryPolicyFilter - :ivar format: This is a required field, it specifies the format for the inventory files. - Required. Known values are: "Csv" and "Parquet". - :vartype format: str or ~azure.mgmt.storage.models.Format - :ivar schedule: This is a required field. This field is used to schedule an inventory - formation. Required. Known values are: "Daily" and "Weekly". - :vartype schedule: str or ~azure.mgmt.storage.models.Schedule - :ivar object_type: This is a required field. 
This field specifies the scope of the inventory - created either at the blob or container level. Required. Known values are: "Blob" and - "Container". - :vartype object_type: str or ~azure.mgmt.storage.models.ObjectType - :ivar schema_fields: This is a required field. This field specifies the fields and properties - of the object to be included in the inventory. The Schema field value 'Name' is always - required. The valid values for this field for the 'Blob' definition.objectType include 'Name, - Creation-Time, Last-Modified, Content-Length, Content-MD5, BlobType, AccessTier, - AccessTierChangeTime, AccessTierInferred, Tags, Expiry-Time, hdi_isfolder, Owner, Group, - Permissions, Acl, Snapshot, VersionId, IsCurrentVersion, Metadata, LastAccessTime, Tags, Etag, - ContentType, ContentEncoding, ContentLanguage, ContentCRC64, CacheControl, ContentDisposition, - LeaseStatus, LeaseState, LeaseDuration, ServerEncrypted, Deleted, DeletionId, DeletedTime, - RemainingRetentionDays, ImmutabilityPolicyUntilDate, ImmutabilityPolicyMode, LegalHold, CopyId, - CopyStatus, CopySource, CopyProgress, CopyCompletionTime, CopyStatusDescription, - CustomerProvidedKeySha256, RehydratePriority, ArchiveStatus, XmsBlobSequenceNumber, - EncryptionScope, IncrementalCopy, TagCount'. For Blob object type schema field value - 'DeletedTime' is applicable only for Hns enabled accounts. The valid values for 'Container' - definition.objectType include 'Name, Last-Modified, Metadata, LeaseStatus, LeaseState, - LeaseDuration, PublicAccess, HasImmutabilityPolicy, HasLegalHold, Etag, DefaultEncryptionScope, - DenyEncryptionScopeOverride, ImmutableStorageWithVersioningEnabled, Deleted, Version, - DeletedTime, RemainingRetentionDays'. Schema field values 'Expiry-Time, hdi_isfolder, Owner, - Group, Permissions, Acl, DeletionId' are valid only for Hns enabled accounts.Schema field - values 'Tags, TagCount' are only valid for Non-Hns accounts. Required. 
- :vartype schema_fields: list[str] - """ - - _validation = { - "format": {"required": True}, - "schedule": {"required": True}, - "object_type": {"required": True}, - "schema_fields": {"required": True}, - } - - _attribute_map = { - "filters": {"key": "filters", "type": "BlobInventoryPolicyFilter"}, - "format": {"key": "format", "type": "str"}, - "schedule": {"key": "schedule", "type": "str"}, - "object_type": {"key": "objectType", "type": "str"}, - "schema_fields": {"key": "schemaFields", "type": "[str]"}, - } - - def __init__( - self, - *, - format: Union[str, "_models.Format"], - schedule: Union[str, "_models.Schedule"], - object_type: Union[str, "_models.ObjectType"], - schema_fields: list[str], - filters: Optional["_models.BlobInventoryPolicyFilter"] = None, - **kwargs: Any - ) -> None: - """ - :keyword filters: An object that defines the filter set. - :paramtype filters: ~azure.mgmt.storage.models.BlobInventoryPolicyFilter - :keyword format: This is a required field, it specifies the format for the inventory files. - Required. Known values are: "Csv" and "Parquet". - :paramtype format: str or ~azure.mgmt.storage.models.Format - :keyword schedule: This is a required field. This field is used to schedule an inventory - formation. Required. Known values are: "Daily" and "Weekly". - :paramtype schedule: str or ~azure.mgmt.storage.models.Schedule - :keyword object_type: This is a required field. This field specifies the scope of the inventory - created either at the blob or container level. Required. Known values are: "Blob" and - "Container". - :paramtype object_type: str or ~azure.mgmt.storage.models.ObjectType - :keyword schema_fields: This is a required field. This field specifies the fields and - properties of the object to be included in the inventory. The Schema field value 'Name' is - always required. 
The valid values for this field for the 'Blob' definition.objectType include - 'Name, Creation-Time, Last-Modified, Content-Length, Content-MD5, BlobType, AccessTier, - AccessTierChangeTime, AccessTierInferred, Tags, Expiry-Time, hdi_isfolder, Owner, Group, - Permissions, Acl, Snapshot, VersionId, IsCurrentVersion, Metadata, LastAccessTime, Tags, Etag, - ContentType, ContentEncoding, ContentLanguage, ContentCRC64, CacheControl, ContentDisposition, - LeaseStatus, LeaseState, LeaseDuration, ServerEncrypted, Deleted, DeletionId, DeletedTime, - RemainingRetentionDays, ImmutabilityPolicyUntilDate, ImmutabilityPolicyMode, LegalHold, CopyId, - CopyStatus, CopySource, CopyProgress, CopyCompletionTime, CopyStatusDescription, - CustomerProvidedKeySha256, RehydratePriority, ArchiveStatus, XmsBlobSequenceNumber, - EncryptionScope, IncrementalCopy, TagCount'. For Blob object type schema field value - 'DeletedTime' is applicable only for Hns enabled accounts. The valid values for 'Container' - definition.objectType include 'Name, Last-Modified, Metadata, LeaseStatus, LeaseState, - LeaseDuration, PublicAccess, HasImmutabilityPolicy, HasLegalHold, Etag, DefaultEncryptionScope, - DenyEncryptionScopeOverride, ImmutableStorageWithVersioningEnabled, Deleted, Version, - DeletedTime, RemainingRetentionDays'. Schema field values 'Expiry-Time, hdi_isfolder, Owner, - Group, Permissions, Acl, DeletionId' are valid only for Hns enabled accounts.Schema field - values 'Tags, TagCount' are only valid for Non-Hns accounts. Required. - :paramtype schema_fields: list[str] - """ - super().__init__(**kwargs) - self.filters = filters - self.format = format - self.schedule = schedule - self.object_type = object_type - self.schema_fields = schema_fields - - -class BlobInventoryPolicyFilter(_serialization.Model): - """An object that defines the blob inventory rule filter conditions. 
For 'Blob' - definition.objectType all filter properties are applicable, 'blobTypes' is required and others - are optional. For 'Container' definition.objectType only prefixMatch is applicable and is - optional. - - :ivar prefix_match: An array of strings with maximum 10 blob prefixes to be included in the - inventory. - :vartype prefix_match: list[str] - :ivar exclude_prefix: An array of strings with maximum 10 blob prefixes to be excluded from the - inventory. - :vartype exclude_prefix: list[str] - :ivar blob_types: An array of predefined enum values. Valid values include blockBlob, - appendBlob, pageBlob. Hns accounts does not support pageBlobs. This field is required when - definition.objectType property is set to 'Blob'. - :vartype blob_types: list[str] - :ivar include_blob_versions: Includes blob versions in blob inventory when value is set to - true. The definition.schemaFields values 'VersionId and IsCurrentVersion' are required if this - property is set to true, else they must be excluded. - :vartype include_blob_versions: bool - :ivar include_snapshots: Includes blob snapshots in blob inventory when value is set to true. - The definition.schemaFields value 'Snapshot' is required if this property is set to true, else - it must be excluded. - :vartype include_snapshots: bool - :ivar include_deleted: For 'Container' definition.objectType the definition.schemaFields must - include 'Deleted, Version, DeletedTime and RemainingRetentionDays'. For 'Blob' - definition.objectType and HNS enabled storage accounts the definition.schemaFields must include - 'DeletionId, Deleted, DeletedTime and RemainingRetentionDays' and for Hns disabled accounts the - definition.schemaFields must include 'Deleted and RemainingRetentionDays', else it must be - excluded. - :vartype include_deleted: bool - :ivar creation_time: This property is used to filter objects based on the object creation time. 
- :vartype creation_time: ~azure.mgmt.storage.models.BlobInventoryCreationTime - """ - - _attribute_map = { - "prefix_match": {"key": "prefixMatch", "type": "[str]"}, - "exclude_prefix": {"key": "excludePrefix", "type": "[str]"}, - "blob_types": {"key": "blobTypes", "type": "[str]"}, - "include_blob_versions": {"key": "includeBlobVersions", "type": "bool"}, - "include_snapshots": {"key": "includeSnapshots", "type": "bool"}, - "include_deleted": {"key": "includeDeleted", "type": "bool"}, - "creation_time": {"key": "creationTime", "type": "BlobInventoryCreationTime"}, - } - - def __init__( - self, - *, - prefix_match: Optional[list[str]] = None, - exclude_prefix: Optional[list[str]] = None, - blob_types: Optional[list[str]] = None, - include_blob_versions: Optional[bool] = None, - include_snapshots: Optional[bool] = None, - include_deleted: Optional[bool] = None, - creation_time: Optional["_models.BlobInventoryCreationTime"] = None, - **kwargs: Any - ) -> None: - """ - :keyword prefix_match: An array of strings with maximum 10 blob prefixes to be included in the - inventory. - :paramtype prefix_match: list[str] - :keyword exclude_prefix: An array of strings with maximum 10 blob prefixes to be excluded from - the inventory. - :paramtype exclude_prefix: list[str] - :keyword blob_types: An array of predefined enum values. Valid values include blockBlob, - appendBlob, pageBlob. Hns accounts does not support pageBlobs. This field is required when - definition.objectType property is set to 'Blob'. - :paramtype blob_types: list[str] - :keyword include_blob_versions: Includes blob versions in blob inventory when value is set to - true. The definition.schemaFields values 'VersionId and IsCurrentVersion' are required if this - property is set to true, else they must be excluded. - :paramtype include_blob_versions: bool - :keyword include_snapshots: Includes blob snapshots in blob inventory when value is set to - true. 
The definition.schemaFields value 'Snapshot' is required if this property is set to true, - else it must be excluded. - :paramtype include_snapshots: bool - :keyword include_deleted: For 'Container' definition.objectType the definition.schemaFields - must include 'Deleted, Version, DeletedTime and RemainingRetentionDays'. For 'Blob' - definition.objectType and HNS enabled storage accounts the definition.schemaFields must include - 'DeletionId, Deleted, DeletedTime and RemainingRetentionDays' and for Hns disabled accounts the - definition.schemaFields must include 'Deleted and RemainingRetentionDays', else it must be - excluded. - :paramtype include_deleted: bool - :keyword creation_time: This property is used to filter objects based on the object creation - time. - :paramtype creation_time: ~azure.mgmt.storage.models.BlobInventoryCreationTime - """ - super().__init__(**kwargs) - self.prefix_match = prefix_match - self.exclude_prefix = exclude_prefix - self.blob_types = blob_types - self.include_blob_versions = include_blob_versions - self.include_snapshots = include_snapshots - self.include_deleted = include_deleted - self.creation_time = creation_time - - -class BlobInventoryPolicyRule(_serialization.Model): - """An object that wraps the blob inventory rule. Each rule is uniquely defined by name. - - All required parameters must be populated in order to send to server. - - :ivar enabled: Rule is enabled when set to true. Required. - :vartype enabled: bool - :ivar name: A rule name can contain any combination of alpha numeric characters. Rule name is - case-sensitive. It must be unique within a policy. Required. - :vartype name: str - :ivar destination: Container name where blob inventory files are stored. Must be pre-created. - Required. - :vartype destination: str - :ivar definition: An object that defines the blob inventory policy rule. Required. 
- :vartype definition: ~azure.mgmt.storage.models.BlobInventoryPolicyDefinition - """ - - _validation = { - "enabled": {"required": True}, - "name": {"required": True}, - "destination": {"required": True}, - "definition": {"required": True}, - } - - _attribute_map = { - "enabled": {"key": "enabled", "type": "bool"}, - "name": {"key": "name", "type": "str"}, - "destination": {"key": "destination", "type": "str"}, - "definition": {"key": "definition", "type": "BlobInventoryPolicyDefinition"}, - } - - def __init__( - self, - *, - enabled: bool, - name: str, - destination: str, - definition: "_models.BlobInventoryPolicyDefinition", - **kwargs: Any - ) -> None: - """ - :keyword enabled: Rule is enabled when set to true. Required. - :paramtype enabled: bool - :keyword name: A rule name can contain any combination of alpha numeric characters. Rule name - is case-sensitive. It must be unique within a policy. Required. - :paramtype name: str - :keyword destination: Container name where blob inventory files are stored. Must be - pre-created. Required. - :paramtype destination: str - :keyword definition: An object that defines the blob inventory policy rule. Required. - :paramtype definition: ~azure.mgmt.storage.models.BlobInventoryPolicyDefinition - """ - super().__init__(**kwargs) - self.enabled = enabled - self.name = name - self.destination = destination - self.definition = definition - - -class BlobInventoryPolicySchema(_serialization.Model): - """The storage account blob inventory policy rules. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar enabled: Policy is enabled if set to true. Required. - :vartype enabled: bool - :ivar destination: Deprecated Property from API version 2021-04-01 onwards, the required - destination container name must be specified at the rule level 'policy.rule.destination'. 
- :vartype destination: str - :ivar type: The valid value is Inventory. Required. "Inventory" - :vartype type: str or ~azure.mgmt.storage.models.InventoryRuleType - :ivar rules: The storage account blob inventory policy rules. The rule is applied when it is - enabled. Required. - :vartype rules: list[~azure.mgmt.storage.models.BlobInventoryPolicyRule] - """ - - _validation = { - "enabled": {"required": True}, - "destination": {"readonly": True}, - "type": {"required": True}, - "rules": {"required": True}, - } - - _attribute_map = { - "enabled": {"key": "enabled", "type": "bool"}, - "destination": {"key": "destination", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "rules": {"key": "rules", "type": "[BlobInventoryPolicyRule]"}, - } - - def __init__( - self, - *, - enabled: bool, - type: Union[str, "_models.InventoryRuleType"], - rules: list["_models.BlobInventoryPolicyRule"], - **kwargs: Any - ) -> None: - """ - :keyword enabled: Policy is enabled if set to true. Required. - :paramtype enabled: bool - :keyword type: The valid value is Inventory. Required. "Inventory" - :paramtype type: str or ~azure.mgmt.storage.models.InventoryRuleType - :keyword rules: The storage account blob inventory policy rules. The rule is applied when it is - enabled. Required. - :paramtype rules: list[~azure.mgmt.storage.models.BlobInventoryPolicyRule] - """ - super().__init__(**kwargs) - self.enabled = enabled - self.destination: Optional[str] = None - self.type = type - self.rules = rules - - -class BlobRestoreParameters(_serialization.Model): - """Blob restore parameters. - - All required parameters must be populated in order to send to server. - - :ivar time_to_restore: Restore blob to the specified time. Required. - :vartype time_to_restore: ~datetime.datetime - :ivar blob_ranges: Blob ranges to restore. Required. 
- :vartype blob_ranges: list[~azure.mgmt.storage.models.BlobRestoreRange] - """ - - _validation = { - "time_to_restore": {"required": True}, - "blob_ranges": {"required": True}, - } - - _attribute_map = { - "time_to_restore": {"key": "timeToRestore", "type": "iso-8601"}, - "blob_ranges": {"key": "blobRanges", "type": "[BlobRestoreRange]"}, - } - - def __init__( - self, *, time_to_restore: datetime.datetime, blob_ranges: list["_models.BlobRestoreRange"], **kwargs: Any - ) -> None: - """ - :keyword time_to_restore: Restore blob to the specified time. Required. - :paramtype time_to_restore: ~datetime.datetime - :keyword blob_ranges: Blob ranges to restore. Required. - :paramtype blob_ranges: list[~azure.mgmt.storage.models.BlobRestoreRange] - """ - super().__init__(**kwargs) - self.time_to_restore = time_to_restore - self.blob_ranges = blob_ranges - - -class BlobRestoreRange(_serialization.Model): - """Blob range. - - All required parameters must be populated in order to send to server. - - :ivar start_range: Blob start range. This is inclusive. Empty means account start. Required. - :vartype start_range: str - :ivar end_range: Blob end range. This is exclusive. Empty means account end. Required. - :vartype end_range: str - """ - - _validation = { - "start_range": {"required": True}, - "end_range": {"required": True}, - } - - _attribute_map = { - "start_range": {"key": "startRange", "type": "str"}, - "end_range": {"key": "endRange", "type": "str"}, - } - - def __init__(self, *, start_range: str, end_range: str, **kwargs: Any) -> None: - """ - :keyword start_range: Blob start range. This is inclusive. Empty means account start. Required. - :paramtype start_range: str - :keyword end_range: Blob end range. This is exclusive. Empty means account end. Required. - :paramtype end_range: str - """ - super().__init__(**kwargs) - self.start_range = start_range - self.end_range = end_range - - -class BlobRestoreStatus(_serialization.Model): - """Blob restore status. 
- - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar status: The status of blob restore progress. Possible values are: - InProgress: Indicates - that blob restore is ongoing. - Complete: Indicates that blob restore has been completed - successfully. - Failed: Indicates that blob restore is failed. Known values are: "InProgress", - "Complete", and "Failed". - :vartype status: str or ~azure.mgmt.storage.models.BlobRestoreProgressStatus - :ivar failure_reason: Failure reason when blob restore is failed. - :vartype failure_reason: str - :ivar restore_id: Id for tracking blob restore request. - :vartype restore_id: str - :ivar parameters: Blob restore request parameters. - :vartype parameters: ~azure.mgmt.storage.models.BlobRestoreParameters - """ - - _validation = { - "status": {"readonly": True}, - "failure_reason": {"readonly": True}, - "restore_id": {"readonly": True}, - "parameters": {"readonly": True}, - } - - _attribute_map = { - "status": {"key": "status", "type": "str"}, - "failure_reason": {"key": "failureReason", "type": "str"}, - "restore_id": {"key": "restoreId", "type": "str"}, - "parameters": {"key": "parameters", "type": "BlobRestoreParameters"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.status: Optional[Union[str, "_models.BlobRestoreProgressStatus"]] = None - self.failure_reason: Optional[str] = None - self.restore_id: Optional[str] = None - self.parameters: Optional["_models.BlobRestoreParameters"] = None - - -class BlobServiceItems(_serialization.Model): - """BlobServiceItems. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of blob services returned. 
- :vartype value: list[~azure.mgmt.storage.models.BlobServiceProperties] - """ - - _validation = { - "value": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[BlobServiceProperties]"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.BlobServiceProperties"]] = None - - -class BlobServiceProperties(Resource): - """The properties of a storage account’s Blob service. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar sku: Sku name and tier. - :vartype sku: ~azure.mgmt.storage.models.Sku - :ivar cors: Specifies CORS rules for the Blob service. You can include up to five CorsRule - elements in the request. If no CorsRule elements are included in the request body, all CORS - rules will be deleted, and CORS will be disabled for the Blob service. - :vartype cors: ~azure.mgmt.storage.models.CorsRules - :ivar default_service_version: DefaultServiceVersion indicates the default version to use for - requests to the Blob service if an incoming request’s version is not specified. Possible values - include version 2008-10-27 and all more recent versions. - :vartype default_service_version: str - :ivar delete_retention_policy: The blob service properties for blob soft delete. - :vartype delete_retention_policy: ~azure.mgmt.storage.models.DeleteRetentionPolicy - :ivar is_versioning_enabled: Versioning is enabled if set to true. 
- :vartype is_versioning_enabled: bool - :ivar automatic_snapshot_policy_enabled: Deprecated in favor of isVersioningEnabled property. - :vartype automatic_snapshot_policy_enabled: bool - :ivar change_feed: The blob service properties for change feed events. - :vartype change_feed: ~azure.mgmt.storage.models.ChangeFeed - :ivar restore_policy: The blob service properties for blob restore policy. - :vartype restore_policy: ~azure.mgmt.storage.models.RestorePolicyProperties - :ivar container_delete_retention_policy: The blob service properties for container soft delete. - :vartype container_delete_retention_policy: ~azure.mgmt.storage.models.DeleteRetentionPolicy - :ivar last_access_time_tracking_policy: The blob service property to configure last access time - based tracking policy. - :vartype last_access_time_tracking_policy: - ~azure.mgmt.storage.models.LastAccessTimeTrackingPolicy - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "sku": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "sku": {"key": "sku", "type": "Sku"}, - "cors": {"key": "properties.cors", "type": "CorsRules"}, - "default_service_version": {"key": "properties.defaultServiceVersion", "type": "str"}, - "delete_retention_policy": {"key": "properties.deleteRetentionPolicy", "type": "DeleteRetentionPolicy"}, - "is_versioning_enabled": {"key": "properties.isVersioningEnabled", "type": "bool"}, - "automatic_snapshot_policy_enabled": {"key": "properties.automaticSnapshotPolicyEnabled", "type": "bool"}, - "change_feed": {"key": "properties.changeFeed", "type": "ChangeFeed"}, - "restore_policy": {"key": "properties.restorePolicy", "type": "RestorePolicyProperties"}, - "container_delete_retention_policy": { - "key": "properties.containerDeleteRetentionPolicy", - "type": "DeleteRetentionPolicy", - }, - 
"last_access_time_tracking_policy": { - "key": "properties.lastAccessTimeTrackingPolicy", - "type": "LastAccessTimeTrackingPolicy", - }, - } - - def __init__( - self, - *, - cors: Optional["_models.CorsRules"] = None, - default_service_version: Optional[str] = None, - delete_retention_policy: Optional["_models.DeleteRetentionPolicy"] = None, - is_versioning_enabled: Optional[bool] = None, - automatic_snapshot_policy_enabled: Optional[bool] = None, - change_feed: Optional["_models.ChangeFeed"] = None, - restore_policy: Optional["_models.RestorePolicyProperties"] = None, - container_delete_retention_policy: Optional["_models.DeleteRetentionPolicy"] = None, - last_access_time_tracking_policy: Optional["_models.LastAccessTimeTrackingPolicy"] = None, - **kwargs: Any - ) -> None: - """ - :keyword cors: Specifies CORS rules for the Blob service. You can include up to five CorsRule - elements in the request. If no CorsRule elements are included in the request body, all CORS - rules will be deleted, and CORS will be disabled for the Blob service. - :paramtype cors: ~azure.mgmt.storage.models.CorsRules - :keyword default_service_version: DefaultServiceVersion indicates the default version to use - for requests to the Blob service if an incoming request’s version is not specified. Possible - values include version 2008-10-27 and all more recent versions. - :paramtype default_service_version: str - :keyword delete_retention_policy: The blob service properties for blob soft delete. - :paramtype delete_retention_policy: ~azure.mgmt.storage.models.DeleteRetentionPolicy - :keyword is_versioning_enabled: Versioning is enabled if set to true. - :paramtype is_versioning_enabled: bool - :keyword automatic_snapshot_policy_enabled: Deprecated in favor of isVersioningEnabled - property. - :paramtype automatic_snapshot_policy_enabled: bool - :keyword change_feed: The blob service properties for change feed events. 
- :paramtype change_feed: ~azure.mgmt.storage.models.ChangeFeed - :keyword restore_policy: The blob service properties for blob restore policy. - :paramtype restore_policy: ~azure.mgmt.storage.models.RestorePolicyProperties - :keyword container_delete_retention_policy: The blob service properties for container soft - delete. - :paramtype container_delete_retention_policy: ~azure.mgmt.storage.models.DeleteRetentionPolicy - :keyword last_access_time_tracking_policy: The blob service property to configure last access - time based tracking policy. - :paramtype last_access_time_tracking_policy: - ~azure.mgmt.storage.models.LastAccessTimeTrackingPolicy - """ - super().__init__(**kwargs) - self.sku: Optional["_models.Sku"] = None - self.cors = cors - self.default_service_version = default_service_version - self.delete_retention_policy = delete_retention_policy - self.is_versioning_enabled = is_versioning_enabled - self.automatic_snapshot_policy_enabled = automatic_snapshot_policy_enabled - self.change_feed = change_feed - self.restore_policy = restore_policy - self.container_delete_retention_policy = container_delete_retention_policy - self.last_access_time_tracking_policy = last_access_time_tracking_policy - - -class BurstingConstants(_serialization.Model): - """Constants used for calculating included burst IOPS and maximum burst credits for IOPS for a - file share in the storage account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar burst_floor_iops: The guaranteed floor of burst IOPS for small file shares. - :vartype burst_floor_iops: int - :ivar burst_io_scalar: The scalar against provisioned IOPS in the file share included burst - IOPS formula. - :vartype burst_io_scalar: float - :ivar burst_timeframe_seconds: The time frame for bursting in seconds in the file share maximum - burst credits for IOPS formula. 
- :vartype burst_timeframe_seconds: int - """ - - _validation = { - "burst_floor_iops": {"readonly": True}, - "burst_io_scalar": {"readonly": True}, - "burst_timeframe_seconds": {"readonly": True}, - } - - _attribute_map = { - "burst_floor_iops": {"key": "burstFloorIOPS", "type": "int"}, - "burst_io_scalar": {"key": "burstIOScalar", "type": "float"}, - "burst_timeframe_seconds": {"key": "burstTimeframeSeconds", "type": "int"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.burst_floor_iops: Optional[int] = None - self.burst_io_scalar: Optional[float] = None - self.burst_timeframe_seconds: Optional[int] = None - - -class ChangeFeed(_serialization.Model): - """The blob service properties for change feed events. - - :ivar enabled: Indicates whether change feed event logging is enabled for the Blob service. - :vartype enabled: bool - :ivar retention_in_days: Indicates the duration of changeFeed retention in days. Minimum value - is 1 day and maximum value is 146000 days (400 years). A null value indicates an infinite - retention of the change feed. - :vartype retention_in_days: int - """ - - _validation = { - "retention_in_days": {"maximum": 146000, "minimum": 1}, - } - - _attribute_map = { - "enabled": {"key": "enabled", "type": "bool"}, - "retention_in_days": {"key": "retentionInDays", "type": "int"}, - } - - def __init__( - self, *, enabled: Optional[bool] = None, retention_in_days: Optional[int] = None, **kwargs: Any - ) -> None: - """ - :keyword enabled: Indicates whether change feed event logging is enabled for the Blob service. - :paramtype enabled: bool - :keyword retention_in_days: Indicates the duration of changeFeed retention in days. Minimum - value is 1 day and maximum value is 146000 days (400 years). A null value indicates an infinite - retention of the change feed. 
- :paramtype retention_in_days: int - """ - super().__init__(**kwargs) - self.enabled = enabled - self.retention_in_days = retention_in_days - - -class CheckNameAvailabilityResult(_serialization.Model): - """The CheckNameAvailability operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name_available: Gets a boolean value that indicates whether the name is available for you - to use. If true, the name is available. If false, the name has already been taken or is invalid - and cannot be used. - :vartype name_available: bool - :ivar reason: Gets the reason that a storage account name could not be used. The Reason element - is only returned if NameAvailable is false. Known values are: "AccountNameInvalid" and - "AlreadyExists". - :vartype reason: str or ~azure.mgmt.storage.models.Reason - :ivar message: Gets an error message explaining the Reason value in more detail. - :vartype message: str - """ - - _validation = { - "name_available": {"readonly": True}, - "reason": {"readonly": True}, - "message": {"readonly": True}, - } - - _attribute_map = { - "name_available": {"key": "nameAvailable", "type": "bool"}, - "reason": {"key": "reason", "type": "str"}, - "message": {"key": "message", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.name_available: Optional[bool] = None - self.reason: Optional[Union[str, "_models.Reason"]] = None - self.message: Optional[str] = None - - -class CloudErrorBody(_serialization.Model): - """An error response from the Storage service. - - :ivar code: An identifier for the error. Codes are invariant and are intended to be consumed - programmatically. - :vartype code: str - :ivar message: A message describing the error, intended to be suitable for display in a user - interface. - :vartype message: str - :ivar target: The target of the particular error. For example, the name of the property in - error. 
- :vartype target: str - :ivar details: A list of additional details about the error. - :vartype details: list[~azure.mgmt.storage.models.CloudErrorBody] - """ - - _attribute_map = { - "code": {"key": "code", "type": "str"}, - "message": {"key": "message", "type": "str"}, - "target": {"key": "target", "type": "str"}, - "details": {"key": "details", "type": "[CloudErrorBody]"}, - } - - def __init__( - self, - *, - code: Optional[str] = None, - message: Optional[str] = None, - target: Optional[str] = None, - details: Optional[list["_models.CloudErrorBody"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword code: An identifier for the error. Codes are invariant and are intended to be consumed - programmatically. - :paramtype code: str - :keyword message: A message describing the error, intended to be suitable for display in a user - interface. - :paramtype message: str - :keyword target: The target of the particular error. For example, the name of the property in - error. - :paramtype target: str - :keyword details: A list of additional details about the error. - :paramtype details: list[~azure.mgmt.storage.models.CloudErrorBody] - """ - super().__init__(**kwargs) - self.code = code - self.message = message - self.target = target - self.details = details - - -class CorsRule(_serialization.Model): - """Specifies a CORS rule for the Blob service. - - All required parameters must be populated in order to send to server. - - :ivar allowed_origins: Required if CorsRule element is present. A list of origin domains that - will be allowed via CORS, or "*" to allow all domains. Required. - :vartype allowed_origins: list[str] - :ivar allowed_methods: Required if CorsRule element is present. A list of HTTP methods that are - allowed to be executed by the origin. Required. - :vartype allowed_methods: list[str or ~azure.mgmt.storage.models.AllowedMethods] - :ivar max_age_in_seconds: Required if CorsRule element is present. 
The number of seconds that - the client/browser should cache a preflight response. Required. - :vartype max_age_in_seconds: int - :ivar exposed_headers: Required if CorsRule element is present. A list of response headers to - expose to CORS clients. Required. - :vartype exposed_headers: list[str] - :ivar allowed_headers: Required if CorsRule element is present. A list of headers allowed to be - part of the cross-origin request. Required. - :vartype allowed_headers: list[str] - """ - - _validation = { - "allowed_origins": {"required": True}, - "allowed_methods": {"required": True}, - "max_age_in_seconds": {"required": True}, - "exposed_headers": {"required": True}, - "allowed_headers": {"required": True}, - } - - _attribute_map = { - "allowed_origins": {"key": "allowedOrigins", "type": "[str]"}, - "allowed_methods": {"key": "allowedMethods", "type": "[str]"}, - "max_age_in_seconds": {"key": "maxAgeInSeconds", "type": "int"}, - "exposed_headers": {"key": "exposedHeaders", "type": "[str]"}, - "allowed_headers": {"key": "allowedHeaders", "type": "[str]"}, - } - - def __init__( - self, - *, - allowed_origins: list[str], - allowed_methods: list[Union[str, "_models.AllowedMethods"]], - max_age_in_seconds: int, - exposed_headers: list[str], - allowed_headers: list[str], - **kwargs: Any - ) -> None: - """ - :keyword allowed_origins: Required if CorsRule element is present. A list of origin domains - that will be allowed via CORS, or "*" to allow all domains. Required. - :paramtype allowed_origins: list[str] - :keyword allowed_methods: Required if CorsRule element is present. A list of HTTP methods that - are allowed to be executed by the origin. Required. - :paramtype allowed_methods: list[str or ~azure.mgmt.storage.models.AllowedMethods] - :keyword max_age_in_seconds: Required if CorsRule element is present. The number of seconds - that the client/browser should cache a preflight response. Required. 
- :paramtype max_age_in_seconds: int - :keyword exposed_headers: Required if CorsRule element is present. A list of response headers - to expose to CORS clients. Required. - :paramtype exposed_headers: list[str] - :keyword allowed_headers: Required if CorsRule element is present. A list of headers allowed to - be part of the cross-origin request. Required. - :paramtype allowed_headers: list[str] - """ - super().__init__(**kwargs) - self.allowed_origins = allowed_origins - self.allowed_methods = allowed_methods - self.max_age_in_seconds = max_age_in_seconds - self.exposed_headers = exposed_headers - self.allowed_headers = allowed_headers - - -class CorsRules(_serialization.Model): - """Sets the CORS rules. You can include up to five CorsRule elements in the request. - - :ivar cors_rules: The List of CORS rules. You can include up to five CorsRule elements in the - request. - :vartype cors_rules: list[~azure.mgmt.storage.models.CorsRule] - """ - - _attribute_map = { - "cors_rules": {"key": "corsRules", "type": "[CorsRule]"}, - } - - def __init__(self, *, cors_rules: Optional[list["_models.CorsRule"]] = None, **kwargs: Any) -> None: - """ - :keyword cors_rules: The List of CORS rules. You can include up to five CorsRule elements in - the request. - :paramtype cors_rules: list[~azure.mgmt.storage.models.CorsRule] - """ - super().__init__(**kwargs) - self.cors_rules = cors_rules - - -class CustomDomain(_serialization.Model): - """The custom domain assigned to this storage account. This can be set via Update. - - All required parameters must be populated in order to send to server. - - :ivar name: Gets or sets the custom domain name assigned to the storage account. Name is the - CNAME source. Required. - :vartype name: str - :ivar use_sub_domain_name: Indicates whether indirect CName validation is enabled. Default - value is false. This should only be set on updates. 
- :vartype use_sub_domain_name: bool - """ - - _validation = { - "name": {"required": True}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "use_sub_domain_name": {"key": "useSubDomainName", "type": "bool"}, - } - - def __init__(self, *, name: str, use_sub_domain_name: Optional[bool] = None, **kwargs: Any) -> None: - """ - :keyword name: Gets or sets the custom domain name assigned to the storage account. Name is the - CNAME source. Required. - :paramtype name: str - :keyword use_sub_domain_name: Indicates whether indirect CName validation is enabled. Default - value is false. This should only be set on updates. - :paramtype use_sub_domain_name: bool - """ - super().__init__(**kwargs) - self.name = name - self.use_sub_domain_name = use_sub_domain_name - - -class DateAfterCreation(_serialization.Model): - """Object to define snapshot and version action conditions. - - All required parameters must be populated in order to send to server. - - :ivar days_after_creation_greater_than: Value indicating the age in days after creation. - Required. - :vartype days_after_creation_greater_than: float - :ivar days_after_last_tier_change_greater_than: Value indicating the age in days after last - blob tier change time. This property is only applicable for tierToArchive actions and requires - daysAfterCreationGreaterThan to be set for snapshots and blob version based actions. The blob - will be archived if both the conditions are satisfied. 
- :vartype days_after_last_tier_change_greater_than: float - """ - - _validation = { - "days_after_creation_greater_than": {"required": True, "minimum": 0, "multiple": 1}, - "days_after_last_tier_change_greater_than": {"minimum": 0, "multiple": 1}, - } - - _attribute_map = { - "days_after_creation_greater_than": {"key": "daysAfterCreationGreaterThan", "type": "float"}, - "days_after_last_tier_change_greater_than": {"key": "daysAfterLastTierChangeGreaterThan", "type": "float"}, - } - - def __init__( - self, - *, - days_after_creation_greater_than: float, - days_after_last_tier_change_greater_than: Optional[float] = None, - **kwargs: Any - ) -> None: - """ - :keyword days_after_creation_greater_than: Value indicating the age in days after creation. - Required. - :paramtype days_after_creation_greater_than: float - :keyword days_after_last_tier_change_greater_than: Value indicating the age in days after last - blob tier change time. This property is only applicable for tierToArchive actions and requires - daysAfterCreationGreaterThan to be set for snapshots and blob version based actions. The blob - will be archived if both the conditions are satisfied. - :paramtype days_after_last_tier_change_greater_than: float - """ - super().__init__(**kwargs) - self.days_after_creation_greater_than = days_after_creation_greater_than - self.days_after_last_tier_change_greater_than = days_after_last_tier_change_greater_than - - -class DateAfterModification(_serialization.Model): - """Object to define the base blob action conditions. Properties daysAfterModificationGreaterThan, - daysAfterLastAccessTimeGreaterThan and daysAfterCreationGreaterThan are mutually exclusive. The - daysAfterLastTierChangeGreaterThan property is only applicable for tierToArchive actions which - requires daysAfterModificationGreaterThan to be set, also it cannot be used in conjunction with - daysAfterLastAccessTimeGreaterThan or daysAfterCreationGreaterThan. 
- - :ivar days_after_modification_greater_than: Value indicating the age in days after last - modification. - :vartype days_after_modification_greater_than: float - :ivar days_after_last_access_time_greater_than: Value indicating the age in days after last - blob access. This property can only be used in conjunction with last access time tracking - policy. - :vartype days_after_last_access_time_greater_than: float - :ivar days_after_last_tier_change_greater_than: Value indicating the age in days after last - blob tier change time. This property is only applicable for tierToArchive actions and requires - daysAfterModificationGreaterThan to be set for baseBlobs based actions. The blob will be - archived if both the conditions are satisfied. - :vartype days_after_last_tier_change_greater_than: float - :ivar days_after_creation_greater_than: Value indicating the age in days after blob creation. - :vartype days_after_creation_greater_than: float - """ - - _validation = { - "days_after_modification_greater_than": {"minimum": 0, "multiple": 1}, - "days_after_last_access_time_greater_than": {"minimum": 0, "multiple": 1}, - "days_after_last_tier_change_greater_than": {"minimum": 0, "multiple": 1}, - "days_after_creation_greater_than": {"minimum": 0, "multiple": 1}, - } - - _attribute_map = { - "days_after_modification_greater_than": {"key": "daysAfterModificationGreaterThan", "type": "float"}, - "days_after_last_access_time_greater_than": {"key": "daysAfterLastAccessTimeGreaterThan", "type": "float"}, - "days_after_last_tier_change_greater_than": {"key": "daysAfterLastTierChangeGreaterThan", "type": "float"}, - "days_after_creation_greater_than": {"key": "daysAfterCreationGreaterThan", "type": "float"}, - } - - def __init__( - self, - *, - days_after_modification_greater_than: Optional[float] = None, - days_after_last_access_time_greater_than: Optional[float] = None, - days_after_last_tier_change_greater_than: Optional[float] = None, - days_after_creation_greater_than: 
Optional[float] = None, - **kwargs: Any - ) -> None: - """ - :keyword days_after_modification_greater_than: Value indicating the age in days after last - modification. - :paramtype days_after_modification_greater_than: float - :keyword days_after_last_access_time_greater_than: Value indicating the age in days after last - blob access. This property can only be used in conjunction with last access time tracking - policy. - :paramtype days_after_last_access_time_greater_than: float - :keyword days_after_last_tier_change_greater_than: Value indicating the age in days after last - blob tier change time. This property is only applicable for tierToArchive actions and requires - daysAfterModificationGreaterThan to be set for baseBlobs based actions. The blob will be - archived if both the conditions are satisfied. - :paramtype days_after_last_tier_change_greater_than: float - :keyword days_after_creation_greater_than: Value indicating the age in days after blob - creation. - :paramtype days_after_creation_greater_than: float - """ - super().__init__(**kwargs) - self.days_after_modification_greater_than = days_after_modification_greater_than - self.days_after_last_access_time_greater_than = days_after_last_access_time_greater_than - self.days_after_last_tier_change_greater_than = days_after_last_tier_change_greater_than - self.days_after_creation_greater_than = days_after_creation_greater_than - - -class ProxyResource(Resource): - """The resource model definition for a Azure Resource Manager proxy resource. It will not have - tags and a location. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - """ - - -class DeletedAccount(ProxyResource): - """Deleted storage account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar storage_account_resource_id: Full resource id of the original storage account. - :vartype storage_account_resource_id: str - :ivar location: Location of the deleted account. - :vartype location: str - :ivar restore_reference: Can be used to attempt recovering this deleted account via - PutStorageAccount API. - :vartype restore_reference: str - :ivar creation_time: Creation time of the deleted account. - :vartype creation_time: str - :ivar deletion_time: Deletion time of the deleted account. 
- :vartype deletion_time: str - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "storage_account_resource_id": {"readonly": True}, - "location": {"readonly": True}, - "restore_reference": {"readonly": True}, - "creation_time": {"readonly": True}, - "deletion_time": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "storage_account_resource_id": {"key": "properties.storageAccountResourceId", "type": "str"}, - "location": {"key": "properties.location", "type": "str"}, - "restore_reference": {"key": "properties.restoreReference", "type": "str"}, - "creation_time": {"key": "properties.creationTime", "type": "str"}, - "deletion_time": {"key": "properties.deletionTime", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.storage_account_resource_id: Optional[str] = None - self.location: Optional[str] = None - self.restore_reference: Optional[str] = None - self.creation_time: Optional[str] = None - self.deletion_time: Optional[str] = None - - -class DeletedAccountListResult(_serialization.Model): - """The response from the List Deleted Accounts operation. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: Gets the list of deleted accounts and their properties. - :vartype value: list[~azure.mgmt.storage.models.DeletedAccount] - :ivar next_link: Request URL that can be used to query next page of deleted accounts. Returned - when total number of requested deleted accounts exceed maximum page size. 
- :vartype next_link: str - """ - - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[DeletedAccount]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.DeletedAccount"]] = None - self.next_link: Optional[str] = None - - -class DeletedShare(_serialization.Model): - """The deleted share to be restored. - - All required parameters must be populated in order to send to server. - - :ivar deleted_share_name: Required. Identify the name of the deleted share that will be - restored. Required. - :vartype deleted_share_name: str - :ivar deleted_share_version: Required. Identify the version of the deleted share that will be - restored. Required. - :vartype deleted_share_version: str - """ - - _validation = { - "deleted_share_name": {"required": True}, - "deleted_share_version": {"required": True}, - } - - _attribute_map = { - "deleted_share_name": {"key": "deletedShareName", "type": "str"}, - "deleted_share_version": {"key": "deletedShareVersion", "type": "str"}, - } - - def __init__(self, *, deleted_share_name: str, deleted_share_version: str, **kwargs: Any) -> None: - """ - :keyword deleted_share_name: Required. Identify the name of the deleted share that will be - restored. Required. - :paramtype deleted_share_name: str - :keyword deleted_share_version: Required. Identify the version of the deleted share that will - be restored. Required. - :paramtype deleted_share_version: str - """ - super().__init__(**kwargs) - self.deleted_share_name = deleted_share_name - self.deleted_share_version = deleted_share_version - - -class DeleteRetentionPolicy(_serialization.Model): - """The service properties for soft delete. - - :ivar enabled: Indicates whether DeleteRetentionPolicy is enabled. 
- :vartype enabled: bool - :ivar days: Indicates the number of days that the deleted item should be retained. The minimum - specified value can be 1 and the maximum value can be 365. - :vartype days: int - :ivar allow_permanent_delete: This property when set to true allows deletion of the soft - deleted blob versions and snapshots. This property cannot be used blob restore policy. This - property only applies to blob service and does not apply to containers or file share. - :vartype allow_permanent_delete: bool - """ - - _validation = { - "days": {"maximum": 365, "minimum": 1}, - } - - _attribute_map = { - "enabled": {"key": "enabled", "type": "bool"}, - "days": {"key": "days", "type": "int"}, - "allow_permanent_delete": {"key": "allowPermanentDelete", "type": "bool"}, - } - - def __init__( - self, - *, - enabled: Optional[bool] = None, - days: Optional[int] = None, - allow_permanent_delete: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword enabled: Indicates whether DeleteRetentionPolicy is enabled. - :paramtype enabled: bool - :keyword days: Indicates the number of days that the deleted item should be retained. The - minimum specified value can be 1 and the maximum value can be 365. - :paramtype days: int - :keyword allow_permanent_delete: This property when set to true allows deletion of the soft - deleted blob versions and snapshots. This property cannot be used blob restore policy. This - property only applies to blob service and does not apply to containers or file share. - :paramtype allow_permanent_delete: bool - """ - super().__init__(**kwargs) - self.enabled = enabled - self.days = days - self.allow_permanent_delete = allow_permanent_delete - - -class Dimension(_serialization.Model): - """Dimension of blobs, possibly be blob type or access tier. - - :ivar name: Display name of dimension. - :vartype name: str - :ivar display_name: Display name of dimension. 
- :vartype display_name: str - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "display_name": {"key": "displayName", "type": "str"}, - } - - def __init__(self, *, name: Optional[str] = None, display_name: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword name: Display name of dimension. - :paramtype name: str - :keyword display_name: Display name of dimension. - :paramtype display_name: str - """ - super().__init__(**kwargs) - self.name = name - self.display_name = display_name - - -class DualStackEndpointPreference(_serialization.Model): - """Dual-stack endpoint preference defines whether IPv6 endpoints are going to be published. - - :ivar publish_ipv6_endpoint: A boolean flag which indicates whether IPv6 storage endpoints are - to be published. - :vartype publish_ipv6_endpoint: bool - """ - - _attribute_map = { - "publish_ipv6_endpoint": {"key": "publishIpv6Endpoint", "type": "bool"}, - } - - def __init__(self, *, publish_ipv6_endpoint: Optional[bool] = None, **kwargs: Any) -> None: - """ - :keyword publish_ipv6_endpoint: A boolean flag which indicates whether IPv6 storage endpoints - are to be published. - :paramtype publish_ipv6_endpoint: bool - """ - super().__init__(**kwargs) - self.publish_ipv6_endpoint = publish_ipv6_endpoint - - -class Encryption(_serialization.Model): - """The encryption settings on the storage account. - - :ivar services: List of services which support encryption. - :vartype services: ~azure.mgmt.storage.models.EncryptionServices - :ivar key_source: The encryption keySource (provider). Possible values (case-insensitive): - Microsoft.Storage, Microsoft.Keyvault. Known values are: "Microsoft.Storage" and - "Microsoft.Keyvault". - :vartype key_source: str or ~azure.mgmt.storage.models.KeySource - :ivar require_infrastructure_encryption: A boolean indicating whether or not the service - applies a secondary layer of encryption with platform managed keys for data at rest. 
- :vartype require_infrastructure_encryption: bool - :ivar key_vault_properties: Properties provided by key vault. - :vartype key_vault_properties: ~azure.mgmt.storage.models.KeyVaultProperties - :ivar encryption_identity: The identity to be used with service-side encryption at rest. - :vartype encryption_identity: ~azure.mgmt.storage.models.EncryptionIdentity - """ - - _attribute_map = { - "services": {"key": "services", "type": "EncryptionServices"}, - "key_source": {"key": "keySource", "type": "str"}, - "require_infrastructure_encryption": {"key": "requireInfrastructureEncryption", "type": "bool"}, - "key_vault_properties": {"key": "keyvaultproperties", "type": "KeyVaultProperties"}, - "encryption_identity": {"key": "identity", "type": "EncryptionIdentity"}, - } - - def __init__( - self, - *, - services: Optional["_models.EncryptionServices"] = None, - key_source: Union[str, "_models.KeySource"] = "Microsoft.Storage", - require_infrastructure_encryption: Optional[bool] = None, - key_vault_properties: Optional["_models.KeyVaultProperties"] = None, - encryption_identity: Optional["_models.EncryptionIdentity"] = None, - **kwargs: Any - ) -> None: - """ - :keyword services: List of services which support encryption. - :paramtype services: ~azure.mgmt.storage.models.EncryptionServices - :keyword key_source: The encryption keySource (provider). Possible values (case-insensitive): - Microsoft.Storage, Microsoft.Keyvault. Known values are: "Microsoft.Storage" and - "Microsoft.Keyvault". - :paramtype key_source: str or ~azure.mgmt.storage.models.KeySource - :keyword require_infrastructure_encryption: A boolean indicating whether or not the service - applies a secondary layer of encryption with platform managed keys for data at rest. - :paramtype require_infrastructure_encryption: bool - :keyword key_vault_properties: Properties provided by key vault. 
- :paramtype key_vault_properties: ~azure.mgmt.storage.models.KeyVaultProperties - :keyword encryption_identity: The identity to be used with service-side encryption at rest. - :paramtype encryption_identity: ~azure.mgmt.storage.models.EncryptionIdentity - """ - super().__init__(**kwargs) - self.services = services - self.key_source = key_source - self.require_infrastructure_encryption = require_infrastructure_encryption - self.key_vault_properties = key_vault_properties - self.encryption_identity = encryption_identity - - -class EncryptionIdentity(_serialization.Model): - """Encryption identity for the storage account. - - :ivar encryption_user_assigned_identity: Resource identifier of the UserAssigned identity to be - associated with server-side encryption on the storage account. - :vartype encryption_user_assigned_identity: str - :ivar encryption_federated_identity_client_id: ClientId of the multi-tenant application to be - used in conjunction with the user-assigned identity for cross-tenant customer-managed-keys - server-side encryption on the storage account. - :vartype encryption_federated_identity_client_id: str - """ - - _attribute_map = { - "encryption_user_assigned_identity": {"key": "userAssignedIdentity", "type": "str"}, - "encryption_federated_identity_client_id": {"key": "federatedIdentityClientId", "type": "str"}, - } - - def __init__( - self, - *, - encryption_user_assigned_identity: Optional[str] = None, - encryption_federated_identity_client_id: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword encryption_user_assigned_identity: Resource identifier of the UserAssigned identity to - be associated with server-side encryption on the storage account. 
- :paramtype encryption_user_assigned_identity: str - :keyword encryption_federated_identity_client_id: ClientId of the multi-tenant application to - be used in conjunction with the user-assigned identity for cross-tenant customer-managed-keys - server-side encryption on the storage account. - :paramtype encryption_federated_identity_client_id: str - """ - super().__init__(**kwargs) - self.encryption_user_assigned_identity = encryption_user_assigned_identity - self.encryption_federated_identity_client_id = encryption_federated_identity_client_id - - -class EncryptionInTransit(_serialization.Model): - """Encryption in transit setting. - - :ivar required: Indicates whether encryption in transit is required. - :vartype required: bool - """ - - _attribute_map = { - "required": {"key": "required", "type": "bool"}, - } - - def __init__(self, *, required: Optional[bool] = None, **kwargs: Any) -> None: - """ - :keyword required: Indicates whether encryption in transit is required. - :paramtype required: bool - """ - super().__init__(**kwargs) - self.required = required - - -class EncryptionScope(Resource): - """The Encryption Scope resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar source: The provider for the encryption scope. Possible values (case-insensitive): - Microsoft.Storage, Microsoft.KeyVault. Known values are: "Microsoft.Storage" and - "Microsoft.KeyVault". - :vartype source: str or ~azure.mgmt.storage.models.EncryptionScopeSource - :ivar state: The state of the encryption scope. 
Possible values (case-insensitive): Enabled, - Disabled. Known values are: "Enabled" and "Disabled". - :vartype state: str or ~azure.mgmt.storage.models.EncryptionScopeState - :ivar creation_time: Gets the creation date and time of the encryption scope in UTC. - :vartype creation_time: ~datetime.datetime - :ivar last_modified_time: Gets the last modification date and time of the encryption scope in - UTC. - :vartype last_modified_time: ~datetime.datetime - :ivar key_vault_properties: The key vault properties for the encryption scope. This is a - required field if encryption scope 'source' attribute is set to 'Microsoft.KeyVault'. - :vartype key_vault_properties: ~azure.mgmt.storage.models.EncryptionScopeKeyVaultProperties - :ivar require_infrastructure_encryption: A boolean indicating whether or not the service - applies a secondary layer of encryption with platform managed keys for data at rest. - :vartype require_infrastructure_encryption: bool - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "creation_time": {"readonly": True}, - "last_modified_time": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "source": {"key": "properties.source", "type": "str"}, - "state": {"key": "properties.state", "type": "str"}, - "creation_time": {"key": "properties.creationTime", "type": "iso-8601"}, - "last_modified_time": {"key": "properties.lastModifiedTime", "type": "iso-8601"}, - "key_vault_properties": {"key": "properties.keyVaultProperties", "type": "EncryptionScopeKeyVaultProperties"}, - "require_infrastructure_encryption": {"key": "properties.requireInfrastructureEncryption", "type": "bool"}, - } - - def __init__( - self, - *, - source: Optional[Union[str, "_models.EncryptionScopeSource"]] = None, - state: Optional[Union[str, "_models.EncryptionScopeState"]] = None, - 
key_vault_properties: Optional["_models.EncryptionScopeKeyVaultProperties"] = None, - require_infrastructure_encryption: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword source: The provider for the encryption scope. Possible values (case-insensitive): - Microsoft.Storage, Microsoft.KeyVault. Known values are: "Microsoft.Storage" and - "Microsoft.KeyVault". - :paramtype source: str or ~azure.mgmt.storage.models.EncryptionScopeSource - :keyword state: The state of the encryption scope. Possible values (case-insensitive): - Enabled, Disabled. Known values are: "Enabled" and "Disabled". - :paramtype state: str or ~azure.mgmt.storage.models.EncryptionScopeState - :keyword key_vault_properties: The key vault properties for the encryption scope. This is a - required field if encryption scope 'source' attribute is set to 'Microsoft.KeyVault'. - :paramtype key_vault_properties: ~azure.mgmt.storage.models.EncryptionScopeKeyVaultProperties - :keyword require_infrastructure_encryption: A boolean indicating whether or not the service - applies a secondary layer of encryption with platform managed keys for data at rest. - :paramtype require_infrastructure_encryption: bool - """ - super().__init__(**kwargs) - self.source = source - self.state = state - self.creation_time: Optional[datetime.datetime] = None - self.last_modified_time: Optional[datetime.datetime] = None - self.key_vault_properties = key_vault_properties - self.require_infrastructure_encryption = require_infrastructure_encryption - - -class EncryptionScopeKeyVaultProperties(_serialization.Model): - """The key vault properties for the encryption scope. This is a required field if encryption scope - 'source' attribute is set to 'Microsoft.KeyVault'. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar key_uri: The object identifier for a key vault key object. 
When applied, the encryption - scope will use the key referenced by the identifier to enable customer-managed key support on - this encryption scope. - :vartype key_uri: str - :ivar current_versioned_key_identifier: The object identifier of the current versioned Key - Vault Key in use. - :vartype current_versioned_key_identifier: str - :ivar last_key_rotation_timestamp: Timestamp of last rotation of the Key Vault Key. - :vartype last_key_rotation_timestamp: ~datetime.datetime - """ - - _validation = { - "current_versioned_key_identifier": {"readonly": True}, - "last_key_rotation_timestamp": {"readonly": True}, - } - - _attribute_map = { - "key_uri": {"key": "keyUri", "type": "str"}, - "current_versioned_key_identifier": {"key": "currentVersionedKeyIdentifier", "type": "str"}, - "last_key_rotation_timestamp": {"key": "lastKeyRotationTimestamp", "type": "iso-8601"}, - } - - def __init__(self, *, key_uri: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword key_uri: The object identifier for a key vault key object. When applied, the - encryption scope will use the key referenced by the identifier to enable customer-managed key - support on this encryption scope. - :paramtype key_uri: str - """ - super().__init__(**kwargs) - self.key_uri = key_uri - self.current_versioned_key_identifier: Optional[str] = None - self.last_key_rotation_timestamp: Optional[datetime.datetime] = None - - -class EncryptionScopeListResult(_serialization.Model): - """List of encryption scopes requested, and if paging is required, a URL to the next page of - encryption scopes. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of encryption scopes requested. - :vartype value: list[~azure.mgmt.storage.models.EncryptionScope] - :ivar next_link: Request URL that can be used to query next page of encryption scopes. Returned - when total number of requested encryption scopes exceeds the maximum page size. 
- :vartype next_link: str - """ - - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[EncryptionScope]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.EncryptionScope"]] = None - self.next_link: Optional[str] = None - - -class EncryptionService(_serialization.Model): - """A service that allows server-side encryption to be used. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar enabled: A boolean indicating whether or not the service encrypts the data as it is - stored. Encryption at rest is enabled by default today and cannot be disabled. - :vartype enabled: bool - :ivar last_enabled_time: Gets a rough estimate of the date/time when the encryption was last - enabled by the user. Data is encrypted at rest by default today and cannot be disabled. - :vartype last_enabled_time: ~datetime.datetime - :ivar key_type: Encryption key type to be used for the encryption service. 'Account' key type - implies that an account-scoped encryption key will be used. 'Service' key type implies that a - default service key is used. Known values are: "Service" and "Account". - :vartype key_type: str or ~azure.mgmt.storage.models.KeyType - """ - - _validation = { - "last_enabled_time": {"readonly": True}, - } - - _attribute_map = { - "enabled": {"key": "enabled", "type": "bool"}, - "last_enabled_time": {"key": "lastEnabledTime", "type": "iso-8601"}, - "key_type": {"key": "keyType", "type": "str"}, - } - - def __init__( - self, *, enabled: Optional[bool] = None, key_type: Optional[Union[str, "_models.KeyType"]] = None, **kwargs: Any - ) -> None: - """ - :keyword enabled: A boolean indicating whether or not the service encrypts the data as it is - stored. 
Encryption at rest is enabled by default today and cannot be disabled. - :paramtype enabled: bool - :keyword key_type: Encryption key type to be used for the encryption service. 'Account' key - type implies that an account-scoped encryption key will be used. 'Service' key type implies - that a default service key is used. Known values are: "Service" and "Account". - :paramtype key_type: str or ~azure.mgmt.storage.models.KeyType - """ - super().__init__(**kwargs) - self.enabled = enabled - self.last_enabled_time: Optional[datetime.datetime] = None - self.key_type = key_type - - -class EncryptionServices(_serialization.Model): - """A list of services that support encryption. - - :ivar blob: The encryption function of the blob storage service. - :vartype blob: ~azure.mgmt.storage.models.EncryptionService - :ivar file: The encryption function of the file storage service. - :vartype file: ~azure.mgmt.storage.models.EncryptionService - :ivar table: The encryption function of the table storage service. - :vartype table: ~azure.mgmt.storage.models.EncryptionService - :ivar queue: The encryption function of the queue storage service. - :vartype queue: ~azure.mgmt.storage.models.EncryptionService - """ - - _attribute_map = { - "blob": {"key": "blob", "type": "EncryptionService"}, - "file": {"key": "file", "type": "EncryptionService"}, - "table": {"key": "table", "type": "EncryptionService"}, - "queue": {"key": "queue", "type": "EncryptionService"}, - } - - def __init__( - self, - *, - blob: Optional["_models.EncryptionService"] = None, - file: Optional["_models.EncryptionService"] = None, - table: Optional["_models.EncryptionService"] = None, - queue: Optional["_models.EncryptionService"] = None, - **kwargs: Any - ) -> None: - """ - :keyword blob: The encryption function of the blob storage service. - :paramtype blob: ~azure.mgmt.storage.models.EncryptionService - :keyword file: The encryption function of the file storage service. 
- :paramtype file: ~azure.mgmt.storage.models.EncryptionService - :keyword table: The encryption function of the table storage service. - :paramtype table: ~azure.mgmt.storage.models.EncryptionService - :keyword queue: The encryption function of the queue storage service. - :paramtype queue: ~azure.mgmt.storage.models.EncryptionService - """ - super().__init__(**kwargs) - self.blob = blob - self.file = file - self.table = table - self.queue = queue - - -class Endpoints(_serialization.Model): - """The URIs that are used to perform a retrieval of a public blob, queue, table, web or dfs - object. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar blob: Gets the blob endpoint. - :vartype blob: str - :ivar queue: Gets the queue endpoint. - :vartype queue: str - :ivar table: Gets the table endpoint. - :vartype table: str - :ivar file: Gets the file endpoint. - :vartype file: str - :ivar web: Gets the web endpoint. - :vartype web: str - :ivar dfs: Gets the dfs endpoint. - :vartype dfs: str - :ivar microsoft_endpoints: Gets the microsoft routing storage endpoints. - :vartype microsoft_endpoints: ~azure.mgmt.storage.models.StorageAccountMicrosoftEndpoints - :ivar internet_endpoints: Gets the internet routing storage endpoints. - :vartype internet_endpoints: ~azure.mgmt.storage.models.StorageAccountInternetEndpoints - :ivar ipv6_endpoints: Gets the IPv6 storage endpoints. 
- :vartype ipv6_endpoints: ~azure.mgmt.storage.models.StorageAccountIpv6Endpoints - """ - - _validation = { - "blob": {"readonly": True}, - "queue": {"readonly": True}, - "table": {"readonly": True}, - "file": {"readonly": True}, - "web": {"readonly": True}, - "dfs": {"readonly": True}, - } - - _attribute_map = { - "blob": {"key": "blob", "type": "str"}, - "queue": {"key": "queue", "type": "str"}, - "table": {"key": "table", "type": "str"}, - "file": {"key": "file", "type": "str"}, - "web": {"key": "web", "type": "str"}, - "dfs": {"key": "dfs", "type": "str"}, - "microsoft_endpoints": {"key": "microsoftEndpoints", "type": "StorageAccountMicrosoftEndpoints"}, - "internet_endpoints": {"key": "internetEndpoints", "type": "StorageAccountInternetEndpoints"}, - "ipv6_endpoints": {"key": "ipv6Endpoints", "type": "StorageAccountIpv6Endpoints"}, - } - - def __init__( - self, - *, - microsoft_endpoints: Optional["_models.StorageAccountMicrosoftEndpoints"] = None, - internet_endpoints: Optional["_models.StorageAccountInternetEndpoints"] = None, - ipv6_endpoints: Optional["_models.StorageAccountIpv6Endpoints"] = None, - **kwargs: Any - ) -> None: - """ - :keyword microsoft_endpoints: Gets the microsoft routing storage endpoints. - :paramtype microsoft_endpoints: ~azure.mgmt.storage.models.StorageAccountMicrosoftEndpoints - :keyword internet_endpoints: Gets the internet routing storage endpoints. - :paramtype internet_endpoints: ~azure.mgmt.storage.models.StorageAccountInternetEndpoints - :keyword ipv6_endpoints: Gets the IPv6 storage endpoints. 
- :paramtype ipv6_endpoints: ~azure.mgmt.storage.models.StorageAccountIpv6Endpoints - """ - super().__init__(**kwargs) - self.blob: Optional[str] = None - self.queue: Optional[str] = None - self.table: Optional[str] = None - self.file: Optional[str] = None - self.web: Optional[str] = None - self.dfs: Optional[str] = None - self.microsoft_endpoints = microsoft_endpoints - self.internet_endpoints = internet_endpoints - self.ipv6_endpoints = ipv6_endpoints - - -class ErrorAdditionalInfo(_serialization.Model): - """The resource management error additional info. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar type: The additional info type. - :vartype type: str - :ivar info: The additional info. - :vartype info: JSON - """ - - _validation = { - "type": {"readonly": True}, - "info": {"readonly": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "info": {"key": "info", "type": "object"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.type: Optional[str] = None - self.info: Optional[JSON] = None - - -class ErrorDetail(_serialization.Model): - """The error detail. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar code: The error code. - :vartype code: str - :ivar message: The error message. - :vartype message: str - :ivar target: The error target. - :vartype target: str - :ivar details: The error details. - :vartype details: list[~azure.mgmt.storage.models.ErrorDetail] - :ivar additional_info: The error additional info. 
- :vartype additional_info: list[~azure.mgmt.storage.models.ErrorAdditionalInfo] - """ - - _validation = { - "code": {"readonly": True}, - "message": {"readonly": True}, - "target": {"readonly": True}, - "details": {"readonly": True}, - "additional_info": {"readonly": True}, - } - - _attribute_map = { - "code": {"key": "code", "type": "str"}, - "message": {"key": "message", "type": "str"}, - "target": {"key": "target", "type": "str"}, - "details": {"key": "details", "type": "[ErrorDetail]"}, - "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.code: Optional[str] = None - self.message: Optional[str] = None - self.target: Optional[str] = None - self.details: Optional[list["_models.ErrorDetail"]] = None - self.additional_info: Optional[list["_models.ErrorAdditionalInfo"]] = None - - -class ErrorResponse(_serialization.Model): - """An error response from the storage resource provider. - - :ivar error: Azure Storage Resource Provider error response body. - :vartype error: ~azure.mgmt.storage.models.ErrorResponseBody - """ - - _attribute_map = { - "error": {"key": "error", "type": "ErrorResponseBody"}, - } - - def __init__(self, *, error: Optional["_models.ErrorResponseBody"] = None, **kwargs: Any) -> None: - """ - :keyword error: Azure Storage Resource Provider error response body. - :paramtype error: ~azure.mgmt.storage.models.ErrorResponseBody - """ - super().__init__(**kwargs) - self.error = error - - -class ErrorResponseAutoGenerated(_serialization.Model): - """Common error response for all Azure Resource Manager APIs to return error details for failed - operations. (This also follows the OData error response format.). - - :ivar error: The error object. 
- :vartype error: ~azure.mgmt.storage.models.ErrorDetail - """ - - _attribute_map = { - "error": {"key": "error", "type": "ErrorDetail"}, - } - - def __init__(self, *, error: Optional["_models.ErrorDetail"] = None, **kwargs: Any) -> None: - """ - :keyword error: The error object. - :paramtype error: ~azure.mgmt.storage.models.ErrorDetail - """ - super().__init__(**kwargs) - self.error = error - - -class ErrorResponseBody(_serialization.Model): - """Error response body contract. - - :ivar code: An identifier for the error. Codes are invariant and are intended to be consumed - programmatically. - :vartype code: str - :ivar message: A message describing the error, intended to be suitable for display in a user - interface. - :vartype message: str - """ - - _attribute_map = { - "code": {"key": "code", "type": "str"}, - "message": {"key": "message", "type": "str"}, - } - - def __init__(self, *, code: Optional[str] = None, message: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword code: An identifier for the error. Codes are invariant and are intended to be consumed - programmatically. - :paramtype code: str - :keyword message: A message describing the error, intended to be suitable for display in a user - interface. - :paramtype message: str - """ - super().__init__(**kwargs) - self.code = code - self.message = message - - -class ExecutionTarget(_serialization.Model): - """Target helps provide filter parameters for the objects in the storage account and forms the - execution context for the storage task. - - :ivar prefix: Required list of object prefixes to be included for task execution. - :vartype prefix: list[str] - :ivar exclude_prefix: List of object prefixes to be excluded from task execution. If there is a - conflict between include and exclude prefixes, the exclude prefix will be the determining - factor. 
- :vartype exclude_prefix: list[str] - """ - - _attribute_map = { - "prefix": {"key": "prefix", "type": "[str]"}, - "exclude_prefix": {"key": "excludePrefix", "type": "[str]"}, - } - - def __init__( - self, *, prefix: Optional[list[str]] = None, exclude_prefix: Optional[list[str]] = None, **kwargs: Any - ) -> None: - """ - :keyword prefix: Required list of object prefixes to be included for task execution. - :paramtype prefix: list[str] - :keyword exclude_prefix: List of object prefixes to be excluded from task execution. If there - is a conflict between include and exclude prefixes, the exclude prefix will be the determining - factor. - :paramtype exclude_prefix: list[str] - """ - super().__init__(**kwargs) - self.prefix = prefix - self.exclude_prefix = exclude_prefix - - -class ExecutionTrigger(_serialization.Model): - """Execution trigger for storage task assignment. - - All required parameters must be populated in order to send to server. - - :ivar type: The trigger type of the storage task assignment execution. Required. Known values - are: "RunOnce" and "OnSchedule". - :vartype type: str or ~azure.mgmt.storage.models.TriggerType - :ivar parameters: The trigger parameters of the storage task assignment execution. Required. - :vartype parameters: ~azure.mgmt.storage.models.TriggerParameters - """ - - _validation = { - "type": {"required": True}, - "parameters": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "parameters": {"key": "parameters", "type": "TriggerParameters"}, - } - - def __init__( - self, *, type: Union[str, "_models.TriggerType"], parameters: "_models.TriggerParameters", **kwargs: Any - ) -> None: - """ - :keyword type: The trigger type of the storage task assignment execution. Required. Known - values are: "RunOnce" and "OnSchedule". - :paramtype type: str or ~azure.mgmt.storage.models.TriggerType - :keyword parameters: The trigger parameters of the storage task assignment execution. Required. 
- :paramtype parameters: ~azure.mgmt.storage.models.TriggerParameters - """ - super().__init__(**kwargs) - self.type = type - self.parameters = parameters - - -class ExecutionTriggerUpdate(_serialization.Model): - """Execution trigger update for storage task assignment. - - :ivar type: The trigger type of the storage task assignment execution. Known values are: - "RunOnce" and "OnSchedule". - :vartype type: str or ~azure.mgmt.storage.models.TriggerType - :ivar parameters: The trigger parameters of the storage task assignment execution. - :vartype parameters: ~azure.mgmt.storage.models.TriggerParametersUpdate - """ - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "parameters": {"key": "parameters", "type": "TriggerParametersUpdate"}, - } - - def __init__( - self, - *, - type: Optional[Union[str, "_models.TriggerType"]] = None, - parameters: Optional["_models.TriggerParametersUpdate"] = None, - **kwargs: Any - ) -> None: - """ - :keyword type: The trigger type of the storage task assignment execution. Known values are: - "RunOnce" and "OnSchedule". - :paramtype type: str or ~azure.mgmt.storage.models.TriggerType - :keyword parameters: The trigger parameters of the storage task assignment execution. - :paramtype parameters: ~azure.mgmt.storage.models.TriggerParametersUpdate - """ - super().__init__(**kwargs) - self.type = type - self.parameters = parameters - - -class ExtendedLocation(_serialization.Model): - """The complex type of the extended location. - - :ivar name: The name of the extended location. - :vartype name: str - :ivar type: The type of the extended location. 
"EdgeZone" - :vartype type: str or ~azure.mgmt.storage.models.ExtendedLocationTypes - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - type: Optional[Union[str, "_models.ExtendedLocationTypes"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: The name of the extended location. - :paramtype name: str - :keyword type: The type of the extended location. "EdgeZone" - :paramtype type: str or ~azure.mgmt.storage.models.ExtendedLocationTypes - """ - super().__init__(**kwargs) - self.name = name - self.type = type - - -class FileServiceItems(_serialization.Model): - """FileServiceItems. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of file services returned. - :vartype value: list[~azure.mgmt.storage.models.FileServiceProperties] - """ - - _validation = { - "value": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[FileServiceProperties]"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.FileServiceProperties"]] = None - - -class FileServiceProperties(Resource): - """The properties of File services in storage account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar sku: Sku name and tier. 
- :vartype sku: ~azure.mgmt.storage.models.Sku - :ivar cors: Specifies CORS rules for the File service. You can include up to five CorsRule - elements in the request. If no CorsRule elements are included in the request body, all CORS - rules will be deleted, and CORS will be disabled for the File service. - :vartype cors: ~azure.mgmt.storage.models.CorsRules - :ivar share_delete_retention_policy: The file service properties for share soft delete. - :vartype share_delete_retention_policy: ~azure.mgmt.storage.models.DeleteRetentionPolicy - :ivar protocol_settings: Protocol settings for file service. - :vartype protocol_settings: ~azure.mgmt.storage.models.ProtocolSettings - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "sku": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "sku": {"key": "sku", "type": "Sku"}, - "cors": {"key": "properties.cors", "type": "CorsRules"}, - "share_delete_retention_policy": { - "key": "properties.shareDeleteRetentionPolicy", - "type": "DeleteRetentionPolicy", - }, - "protocol_settings": {"key": "properties.protocolSettings", "type": "ProtocolSettings"}, - } - - def __init__( - self, - *, - cors: Optional["_models.CorsRules"] = None, - share_delete_retention_policy: Optional["_models.DeleteRetentionPolicy"] = None, - protocol_settings: Optional["_models.ProtocolSettings"] = None, - **kwargs: Any - ) -> None: - """ - :keyword cors: Specifies CORS rules for the File service. You can include up to five CorsRule - elements in the request. If no CorsRule elements are included in the request body, all CORS - rules will be deleted, and CORS will be disabled for the File service. - :paramtype cors: ~azure.mgmt.storage.models.CorsRules - :keyword share_delete_retention_policy: The file service properties for share soft delete. 
- :paramtype share_delete_retention_policy: ~azure.mgmt.storage.models.DeleteRetentionPolicy - :keyword protocol_settings: Protocol settings for file service. - :paramtype protocol_settings: ~azure.mgmt.storage.models.ProtocolSettings - """ - super().__init__(**kwargs) - self.sku: Optional["_models.Sku"] = None - self.cors = cors - self.share_delete_retention_policy = share_delete_retention_policy - self.protocol_settings = protocol_settings - - -class FileServiceUsage(Resource): - """The usage of file service in storage account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar properties: File service usage in storage account including account limits, file share - limits and constants used in recommendations and bursting formula. 
- :vartype properties: ~azure.mgmt.storage.models.FileServiceUsageProperties - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "properties": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "properties": {"key": "properties", "type": "FileServiceUsageProperties"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.properties: Optional["_models.FileServiceUsageProperties"] = None - - -class FileServiceUsageProperties(_serialization.Model): - """File service usage in storage account including account limits, file share limits and constants - used in recommendations and bursting formula. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar storage_account_limits: Maximum provisioned storage, IOPS, bandwidth and number of file - shares limits for the storage account. - :vartype storage_account_limits: ~azure.mgmt.storage.models.AccountLimits - :ivar file_share_limits: Minimum and maximum provisioned storage, IOPS and bandwidth limits for - a file share in the storage account. - :vartype file_share_limits: ~azure.mgmt.storage.models.FileShareLimits - :ivar file_share_recommendations: Constants used for calculating recommended provisioned IOPS - and bandwidth for a file share in the storage account. - :vartype file_share_recommendations: ~azure.mgmt.storage.models.FileShareRecommendations - :ivar bursting_constants: Constants used for calculating included burst IOPS and maximum burst - credits for IOPS for a file share in the storage account. - :vartype bursting_constants: ~azure.mgmt.storage.models.BurstingConstants - :ivar storage_account_usage: Usage of provisioned storage, IOPS, bandwidth and number of file - shares across all live shares and soft-deleted shares in the account. 
- :vartype storage_account_usage: ~azure.mgmt.storage.models.AccountUsage - """ - - _validation = { - "storage_account_limits": {"readonly": True}, - "file_share_limits": {"readonly": True}, - "file_share_recommendations": {"readonly": True}, - "bursting_constants": {"readonly": True}, - "storage_account_usage": {"readonly": True}, - } - - _attribute_map = { - "storage_account_limits": {"key": "storageAccountLimits", "type": "AccountLimits"}, - "file_share_limits": {"key": "fileShareLimits", "type": "FileShareLimits"}, - "file_share_recommendations": {"key": "fileShareRecommendations", "type": "FileShareRecommendations"}, - "bursting_constants": {"key": "burstingConstants", "type": "BurstingConstants"}, - "storage_account_usage": {"key": "storageAccountUsage", "type": "AccountUsage"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.storage_account_limits: Optional["_models.AccountLimits"] = None - self.file_share_limits: Optional["_models.FileShareLimits"] = None - self.file_share_recommendations: Optional["_models.FileShareRecommendations"] = None - self.bursting_constants: Optional["_models.BurstingConstants"] = None - self.storage_account_usage: Optional["_models.AccountUsage"] = None - - -class FileServiceUsages(_serialization.Model): - """List file service usages schema. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of file service usages returned. - :vartype value: list[~azure.mgmt.storage.models.FileServiceUsage] - :ivar next_link: Request URL that can be used to query next page of file service usages. - Returned when total number of requested file service usages exceed maximum page size. 
- :vartype next_link: str - """ - - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[FileServiceUsage]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.FileServiceUsage"]] = None - self.next_link: Optional[str] = None - - -class FileShare(AzureEntityResource): - """Properties of the file share, including Id, resource name, resource type, Etag. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar etag: Resource Etag. - :vartype etag: str - :ivar last_modified_time: Returns the date and time the share was last modified. - :vartype last_modified_time: ~datetime.datetime - :ivar metadata: A name-value pair to associate with the share as metadata. - :vartype metadata: dict[str, str] - :ivar share_quota: The provisioned size of the share, in gibibytes. Must be greater than 0, and - less than or equal to 5TB (5120). For Large File Shares, the maximum size is 102400. For file - shares created under Files Provisioned v2 account type, please refer to the GetFileServiceUsage - API response for the minimum and maximum allowed provisioned storage size. - :vartype share_quota: int - :ivar provisioned_iops: The provisioned IOPS of the share. This property is only for file - shares created under Files Provisioned v2 account type. 
Please refer to the GetFileServiceUsage - API response for the minimum and maximum allowed value for provisioned IOPS. - :vartype provisioned_iops: int - :ivar provisioned_bandwidth_mibps: The provisioned bandwidth of the share, in mebibytes per - second. This property is only for file shares created under Files Provisioned v2 account type. - Please refer to the GetFileServiceUsage API response for the minimum and maximum allowed value - for provisioned bandwidth. - :vartype provisioned_bandwidth_mibps: int - :ivar included_burst_iops: The calculated burst IOPS of the share. This property is only for - file shares created under Files Provisioned v2 account type. - :vartype included_burst_iops: int - :ivar max_burst_credits_for_iops: The calculated maximum burst credits for the share. This - property is only for file shares created under Files Provisioned v2 account type. - :vartype max_burst_credits_for_iops: int - :ivar next_allowed_quota_downgrade_time: Returns the next allowed provisioned storage size - downgrade time for the share. This property is only for file shares created under Files - Provisioned v1 SSD and Files Provisioned v2 account type. - :vartype next_allowed_quota_downgrade_time: ~datetime.datetime - :ivar next_allowed_provisioned_iops_downgrade_time: Returns the next allowed provisioned IOPS - downgrade time for the share. This property is only for file shares created under Files - Provisioned v2 account type. - :vartype next_allowed_provisioned_iops_downgrade_time: ~datetime.datetime - :ivar next_allowed_provisioned_bandwidth_downgrade_time: Returns the next allowed provisioned - bandwidth downgrade time for the share. This property is only for file shares created under - Files Provisioned v2 account type. - :vartype next_allowed_provisioned_bandwidth_downgrade_time: ~datetime.datetime - :ivar enabled_protocols: The authentication protocol that is used for the file share. Can only - be specified when creating a share. 
Known values are: "SMB" and "NFS". - :vartype enabled_protocols: str or ~azure.mgmt.storage.models.EnabledProtocols - :ivar root_squash: The property is for NFS share only. The default is NoRootSquash. Known - values are: "NoRootSquash", "RootSquash", and "AllSquash". - :vartype root_squash: str or ~azure.mgmt.storage.models.RootSquashType - :ivar version: The version of the share. - :vartype version: str - :ivar deleted: Indicates whether the share was deleted. - :vartype deleted: bool - :ivar deleted_time: The deleted time if the share was deleted. - :vartype deleted_time: ~datetime.datetime - :ivar remaining_retention_days: Remaining retention days for share that was soft deleted. - :vartype remaining_retention_days: int - :ivar access_tier: Access tier for specific share. GpV2 account can choose between - TransactionOptimized (default), Hot, and Cool. FileStorage account can choose Premium. Known - values are: "TransactionOptimized", "Hot", "Cool", and "Premium". - :vartype access_tier: str or ~azure.mgmt.storage.models.ShareAccessTier - :ivar access_tier_change_time: Indicates the last modification time for share access tier. - :vartype access_tier_change_time: ~datetime.datetime - :ivar access_tier_status: Indicates if there is a pending transition for access tier. - :vartype access_tier_status: str - :ivar share_usage_bytes: The approximate size of the data stored on the share. Note that this - value may not include all recently created or recently resized files. - :vartype share_usage_bytes: int - :ivar lease_status: The lease status of the share. Known values are: "Locked" and "Unlocked". - :vartype lease_status: str or ~azure.mgmt.storage.models.LeaseStatus - :ivar lease_state: Lease state of the share. Known values are: "Available", "Leased", - "Expired", "Breaking", and "Broken". 
- :vartype lease_state: str or ~azure.mgmt.storage.models.LeaseState - :ivar lease_duration: Specifies whether the lease on a share is of infinite or fixed duration, - only when the share is leased. Known values are: "Infinite" and "Fixed". - :vartype lease_duration: str or ~azure.mgmt.storage.models.LeaseDuration - :ivar signed_identifiers: List of stored access policies specified on the share. - :vartype signed_identifiers: list[~azure.mgmt.storage.models.SignedIdentifier] - :ivar snapshot_time: Creation time of share snapshot returned in the response of list shares - with expand param "snapshots". - :vartype snapshot_time: ~datetime.datetime - :ivar file_share_paid_bursting: File Share Paid Bursting properties. - :vartype file_share_paid_bursting: - ~azure.mgmt.storage.models.FileSharePropertiesFileSharePaidBursting - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "etag": {"readonly": True}, - "last_modified_time": {"readonly": True}, - "included_burst_iops": {"readonly": True}, - "max_burst_credits_for_iops": {"readonly": True}, - "next_allowed_quota_downgrade_time": {"readonly": True}, - "next_allowed_provisioned_iops_downgrade_time": {"readonly": True}, - "next_allowed_provisioned_bandwidth_downgrade_time": {"readonly": True}, - "version": {"readonly": True}, - "deleted": {"readonly": True}, - "deleted_time": {"readonly": True}, - "remaining_retention_days": {"readonly": True}, - "access_tier_change_time": {"readonly": True}, - "access_tier_status": {"readonly": True}, - "share_usage_bytes": {"readonly": True}, - "lease_status": {"readonly": True}, - "lease_state": {"readonly": True}, - "lease_duration": {"readonly": True}, - "snapshot_time": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "etag": {"key": "etag", "type": "str"}, - "last_modified_time": {"key": 
"properties.lastModifiedTime", "type": "iso-8601"}, - "metadata": {"key": "properties.metadata", "type": "{str}"}, - "share_quota": {"key": "properties.shareQuota", "type": "int"}, - "provisioned_iops": {"key": "properties.provisionedIops", "type": "int"}, - "provisioned_bandwidth_mibps": {"key": "properties.provisionedBandwidthMibps", "type": "int"}, - "included_burst_iops": {"key": "properties.includedBurstIops", "type": "int"}, - "max_burst_credits_for_iops": {"key": "properties.maxBurstCreditsForIops", "type": "int"}, - "next_allowed_quota_downgrade_time": {"key": "properties.nextAllowedQuotaDowngradeTime", "type": "rfc-1123"}, - "next_allowed_provisioned_iops_downgrade_time": { - "key": "properties.nextAllowedProvisionedIopsDowngradeTime", - "type": "rfc-1123", - }, - "next_allowed_provisioned_bandwidth_downgrade_time": { - "key": "properties.nextAllowedProvisionedBandwidthDowngradeTime", - "type": "rfc-1123", - }, - "enabled_protocols": {"key": "properties.enabledProtocols", "type": "str"}, - "root_squash": {"key": "properties.rootSquash", "type": "str"}, - "version": {"key": "properties.version", "type": "str"}, - "deleted": {"key": "properties.deleted", "type": "bool"}, - "deleted_time": {"key": "properties.deletedTime", "type": "iso-8601"}, - "remaining_retention_days": {"key": "properties.remainingRetentionDays", "type": "int"}, - "access_tier": {"key": "properties.accessTier", "type": "str"}, - "access_tier_change_time": {"key": "properties.accessTierChangeTime", "type": "iso-8601"}, - "access_tier_status": {"key": "properties.accessTierStatus", "type": "str"}, - "share_usage_bytes": {"key": "properties.shareUsageBytes", "type": "int"}, - "lease_status": {"key": "properties.leaseStatus", "type": "str"}, - "lease_state": {"key": "properties.leaseState", "type": "str"}, - "lease_duration": {"key": "properties.leaseDuration", "type": "str"}, - "signed_identifiers": {"key": "properties.signedIdentifiers", "type": "[SignedIdentifier]"}, - "snapshot_time": 
{"key": "properties.snapshotTime", "type": "iso-8601"}, - "file_share_paid_bursting": { - "key": "properties.fileSharePaidBursting", - "type": "FileSharePropertiesFileSharePaidBursting", - }, - } - - def __init__( # pylint: disable=too-many-locals - self, - *, - metadata: Optional[dict[str, str]] = None, - share_quota: Optional[int] = None, - provisioned_iops: Optional[int] = None, - provisioned_bandwidth_mibps: Optional[int] = None, - enabled_protocols: Optional[Union[str, "_models.EnabledProtocols"]] = None, - root_squash: Optional[Union[str, "_models.RootSquashType"]] = None, - access_tier: Optional[Union[str, "_models.ShareAccessTier"]] = None, - signed_identifiers: Optional[list["_models.SignedIdentifier"]] = None, - file_share_paid_bursting: Optional["_models.FileSharePropertiesFileSharePaidBursting"] = None, - **kwargs: Any - ) -> None: - """ - :keyword metadata: A name-value pair to associate with the share as metadata. - :paramtype metadata: dict[str, str] - :keyword share_quota: The provisioned size of the share, in gibibytes. Must be greater than 0, - and less than or equal to 5TB (5120). For Large File Shares, the maximum size is 102400. For - file shares created under Files Provisioned v2 account type, please refer to the - GetFileServiceUsage API response for the minimum and maximum allowed provisioned storage size. - :paramtype share_quota: int - :keyword provisioned_iops: The provisioned IOPS of the share. This property is only for file - shares created under Files Provisioned v2 account type. Please refer to the GetFileServiceUsage - API response for the minimum and maximum allowed value for provisioned IOPS. - :paramtype provisioned_iops: int - :keyword provisioned_bandwidth_mibps: The provisioned bandwidth of the share, in mebibytes per - second. This property is only for file shares created under Files Provisioned v2 account type. 
- Please refer to the GetFileServiceUsage API response for the minimum and maximum allowed value - for provisioned bandwidth. - :paramtype provisioned_bandwidth_mibps: int - :keyword enabled_protocols: The authentication protocol that is used for the file share. Can - only be specified when creating a share. Known values are: "SMB" and "NFS". - :paramtype enabled_protocols: str or ~azure.mgmt.storage.models.EnabledProtocols - :keyword root_squash: The property is for NFS share only. The default is NoRootSquash. Known - values are: "NoRootSquash", "RootSquash", and "AllSquash". - :paramtype root_squash: str or ~azure.mgmt.storage.models.RootSquashType - :keyword access_tier: Access tier for specific share. GpV2 account can choose between - TransactionOptimized (default), Hot, and Cool. FileStorage account can choose Premium. Known - values are: "TransactionOptimized", "Hot", "Cool", and "Premium". - :paramtype access_tier: str or ~azure.mgmt.storage.models.ShareAccessTier - :keyword signed_identifiers: List of stored access policies specified on the share. - :paramtype signed_identifiers: list[~azure.mgmt.storage.models.SignedIdentifier] - :keyword file_share_paid_bursting: File Share Paid Bursting properties. 
- :paramtype file_share_paid_bursting: - ~azure.mgmt.storage.models.FileSharePropertiesFileSharePaidBursting - """ - super().__init__(**kwargs) - self.last_modified_time: Optional[datetime.datetime] = None - self.metadata = metadata - self.share_quota = share_quota - self.provisioned_iops = provisioned_iops - self.provisioned_bandwidth_mibps = provisioned_bandwidth_mibps - self.included_burst_iops: Optional[int] = None - self.max_burst_credits_for_iops: Optional[int] = None - self.next_allowed_quota_downgrade_time: Optional[datetime.datetime] = None - self.next_allowed_provisioned_iops_downgrade_time: Optional[datetime.datetime] = None - self.next_allowed_provisioned_bandwidth_downgrade_time: Optional[datetime.datetime] = None - self.enabled_protocols = enabled_protocols - self.root_squash = root_squash - self.version: Optional[str] = None - self.deleted: Optional[bool] = None - self.deleted_time: Optional[datetime.datetime] = None - self.remaining_retention_days: Optional[int] = None - self.access_tier = access_tier - self.access_tier_change_time: Optional[datetime.datetime] = None - self.access_tier_status: Optional[str] = None - self.share_usage_bytes: Optional[int] = None - self.lease_status: Optional[Union[str, "_models.LeaseStatus"]] = None - self.lease_state: Optional[Union[str, "_models.LeaseState"]] = None - self.lease_duration: Optional[Union[str, "_models.LeaseDuration"]] = None - self.signed_identifiers = signed_identifiers - self.snapshot_time: Optional[datetime.datetime] = None - self.file_share_paid_bursting = file_share_paid_bursting - - -class FileShareItem(AzureEntityResource): - """The file share properties be listed out. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. 
- :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar etag: Resource Etag. - :vartype etag: str - :ivar last_modified_time: Returns the date and time the share was last modified. - :vartype last_modified_time: ~datetime.datetime - :ivar metadata: A name-value pair to associate with the share as metadata. - :vartype metadata: dict[str, str] - :ivar share_quota: The provisioned size of the share, in gibibytes. Must be greater than 0, and - less than or equal to 5TB (5120). For Large File Shares, the maximum size is 102400. For file - shares created under Files Provisioned v2 account type, please refer to the GetFileServiceUsage - API response for the minimum and maximum allowed provisioned storage size. - :vartype share_quota: int - :ivar provisioned_iops: The provisioned IOPS of the share. This property is only for file - shares created under Files Provisioned v2 account type. Please refer to the GetFileServiceUsage - API response for the minimum and maximum allowed value for provisioned IOPS. - :vartype provisioned_iops: int - :ivar provisioned_bandwidth_mibps: The provisioned bandwidth of the share, in mebibytes per - second. This property is only for file shares created under Files Provisioned v2 account type. - Please refer to the GetFileServiceUsage API response for the minimum and maximum allowed value - for provisioned bandwidth. - :vartype provisioned_bandwidth_mibps: int - :ivar included_burst_iops: The calculated burst IOPS of the share. This property is only for - file shares created under Files Provisioned v2 account type. - :vartype included_burst_iops: int - :ivar max_burst_credits_for_iops: The calculated maximum burst credits for the share. This - property is only for file shares created under Files Provisioned v2 account type. 
- :vartype max_burst_credits_for_iops: int - :ivar next_allowed_quota_downgrade_time: Returns the next allowed provisioned storage size - downgrade time for the share. This property is only for file shares created under Files - Provisioned v1 SSD and Files Provisioned v2 account type. - :vartype next_allowed_quota_downgrade_time: ~datetime.datetime - :ivar next_allowed_provisioned_iops_downgrade_time: Returns the next allowed provisioned IOPS - downgrade time for the share. This property is only for file shares created under Files - Provisioned v2 account type. - :vartype next_allowed_provisioned_iops_downgrade_time: ~datetime.datetime - :ivar next_allowed_provisioned_bandwidth_downgrade_time: Returns the next allowed provisioned - bandwidth downgrade time for the share. This property is only for file shares created under - Files Provisioned v2 account type. - :vartype next_allowed_provisioned_bandwidth_downgrade_time: ~datetime.datetime - :ivar enabled_protocols: The authentication protocol that is used for the file share. Can only - be specified when creating a share. Known values are: "SMB" and "NFS". - :vartype enabled_protocols: str or ~azure.mgmt.storage.models.EnabledProtocols - :ivar root_squash: The property is for NFS share only. The default is NoRootSquash. Known - values are: "NoRootSquash", "RootSquash", and "AllSquash". - :vartype root_squash: str or ~azure.mgmt.storage.models.RootSquashType - :ivar version: The version of the share. - :vartype version: str - :ivar deleted: Indicates whether the share was deleted. - :vartype deleted: bool - :ivar deleted_time: The deleted time if the share was deleted. - :vartype deleted_time: ~datetime.datetime - :ivar remaining_retention_days: Remaining retention days for share that was soft deleted. - :vartype remaining_retention_days: int - :ivar access_tier: Access tier for specific share. GpV2 account can choose between - TransactionOptimized (default), Hot, and Cool. FileStorage account can choose Premium. 
Known - values are: "TransactionOptimized", "Hot", "Cool", and "Premium". - :vartype access_tier: str or ~azure.mgmt.storage.models.ShareAccessTier - :ivar access_tier_change_time: Indicates the last modification time for share access tier. - :vartype access_tier_change_time: ~datetime.datetime - :ivar access_tier_status: Indicates if there is a pending transition for access tier. - :vartype access_tier_status: str - :ivar share_usage_bytes: The approximate size of the data stored on the share. Note that this - value may not include all recently created or recently resized files. - :vartype share_usage_bytes: int - :ivar lease_status: The lease status of the share. Known values are: "Locked" and "Unlocked". - :vartype lease_status: str or ~azure.mgmt.storage.models.LeaseStatus - :ivar lease_state: Lease state of the share. Known values are: "Available", "Leased", - "Expired", "Breaking", and "Broken". - :vartype lease_state: str or ~azure.mgmt.storage.models.LeaseState - :ivar lease_duration: Specifies whether the lease on a share is of infinite or fixed duration, - only when the share is leased. Known values are: "Infinite" and "Fixed". - :vartype lease_duration: str or ~azure.mgmt.storage.models.LeaseDuration - :ivar signed_identifiers: List of stored access policies specified on the share. - :vartype signed_identifiers: list[~azure.mgmt.storage.models.SignedIdentifier] - :ivar snapshot_time: Creation time of share snapshot returned in the response of list shares - with expand param "snapshots". - :vartype snapshot_time: ~datetime.datetime - :ivar file_share_paid_bursting: File Share Paid Bursting properties. 
- :vartype file_share_paid_bursting: - ~azure.mgmt.storage.models.FileSharePropertiesFileSharePaidBursting - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "etag": {"readonly": True}, - "last_modified_time": {"readonly": True}, - "included_burst_iops": {"readonly": True}, - "max_burst_credits_for_iops": {"readonly": True}, - "next_allowed_quota_downgrade_time": {"readonly": True}, - "next_allowed_provisioned_iops_downgrade_time": {"readonly": True}, - "next_allowed_provisioned_bandwidth_downgrade_time": {"readonly": True}, - "version": {"readonly": True}, - "deleted": {"readonly": True}, - "deleted_time": {"readonly": True}, - "remaining_retention_days": {"readonly": True}, - "access_tier_change_time": {"readonly": True}, - "access_tier_status": {"readonly": True}, - "share_usage_bytes": {"readonly": True}, - "lease_status": {"readonly": True}, - "lease_state": {"readonly": True}, - "lease_duration": {"readonly": True}, - "snapshot_time": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "etag": {"key": "etag", "type": "str"}, - "last_modified_time": {"key": "properties.lastModifiedTime", "type": "iso-8601"}, - "metadata": {"key": "properties.metadata", "type": "{str}"}, - "share_quota": {"key": "properties.shareQuota", "type": "int"}, - "provisioned_iops": {"key": "properties.provisionedIops", "type": "int"}, - "provisioned_bandwidth_mibps": {"key": "properties.provisionedBandwidthMibps", "type": "int"}, - "included_burst_iops": {"key": "properties.includedBurstIops", "type": "int"}, - "max_burst_credits_for_iops": {"key": "properties.maxBurstCreditsForIops", "type": "int"}, - "next_allowed_quota_downgrade_time": {"key": "properties.nextAllowedQuotaDowngradeTime", "type": "rfc-1123"}, - "next_allowed_provisioned_iops_downgrade_time": { - "key": 
"properties.nextAllowedProvisionedIopsDowngradeTime", - "type": "rfc-1123", - }, - "next_allowed_provisioned_bandwidth_downgrade_time": { - "key": "properties.nextAllowedProvisionedBandwidthDowngradeTime", - "type": "rfc-1123", - }, - "enabled_protocols": {"key": "properties.enabledProtocols", "type": "str"}, - "root_squash": {"key": "properties.rootSquash", "type": "str"}, - "version": {"key": "properties.version", "type": "str"}, - "deleted": {"key": "properties.deleted", "type": "bool"}, - "deleted_time": {"key": "properties.deletedTime", "type": "iso-8601"}, - "remaining_retention_days": {"key": "properties.remainingRetentionDays", "type": "int"}, - "access_tier": {"key": "properties.accessTier", "type": "str"}, - "access_tier_change_time": {"key": "properties.accessTierChangeTime", "type": "iso-8601"}, - "access_tier_status": {"key": "properties.accessTierStatus", "type": "str"}, - "share_usage_bytes": {"key": "properties.shareUsageBytes", "type": "int"}, - "lease_status": {"key": "properties.leaseStatus", "type": "str"}, - "lease_state": {"key": "properties.leaseState", "type": "str"}, - "lease_duration": {"key": "properties.leaseDuration", "type": "str"}, - "signed_identifiers": {"key": "properties.signedIdentifiers", "type": "[SignedIdentifier]"}, - "snapshot_time": {"key": "properties.snapshotTime", "type": "iso-8601"}, - "file_share_paid_bursting": { - "key": "properties.fileSharePaidBursting", - "type": "FileSharePropertiesFileSharePaidBursting", - }, - } - - def __init__( # pylint: disable=too-many-locals - self, - *, - metadata: Optional[dict[str, str]] = None, - share_quota: Optional[int] = None, - provisioned_iops: Optional[int] = None, - provisioned_bandwidth_mibps: Optional[int] = None, - enabled_protocols: Optional[Union[str, "_models.EnabledProtocols"]] = None, - root_squash: Optional[Union[str, "_models.RootSquashType"]] = None, - access_tier: Optional[Union[str, "_models.ShareAccessTier"]] = None, - signed_identifiers: 
Optional[list["_models.SignedIdentifier"]] = None, - file_share_paid_bursting: Optional["_models.FileSharePropertiesFileSharePaidBursting"] = None, - **kwargs: Any - ) -> None: - """ - :keyword metadata: A name-value pair to associate with the share as metadata. - :paramtype metadata: dict[str, str] - :keyword share_quota: The provisioned size of the share, in gibibytes. Must be greater than 0, - and less than or equal to 5TB (5120). For Large File Shares, the maximum size is 102400. For - file shares created under Files Provisioned v2 account type, please refer to the - GetFileServiceUsage API response for the minimum and maximum allowed provisioned storage size. - :paramtype share_quota: int - :keyword provisioned_iops: The provisioned IOPS of the share. This property is only for file - shares created under Files Provisioned v2 account type. Please refer to the GetFileServiceUsage - API response for the minimum and maximum allowed value for provisioned IOPS. - :paramtype provisioned_iops: int - :keyword provisioned_bandwidth_mibps: The provisioned bandwidth of the share, in mebibytes per - second. This property is only for file shares created under Files Provisioned v2 account type. - Please refer to the GetFileServiceUsage API response for the minimum and maximum allowed value - for provisioned bandwidth. - :paramtype provisioned_bandwidth_mibps: int - :keyword enabled_protocols: The authentication protocol that is used for the file share. Can - only be specified when creating a share. Known values are: "SMB" and "NFS". - :paramtype enabled_protocols: str or ~azure.mgmt.storage.models.EnabledProtocols - :keyword root_squash: The property is for NFS share only. The default is NoRootSquash. Known - values are: "NoRootSquash", "RootSquash", and "AllSquash". - :paramtype root_squash: str or ~azure.mgmt.storage.models.RootSquashType - :keyword access_tier: Access tier for specific share. GpV2 account can choose between - TransactionOptimized (default), Hot, and Cool. 
FileStorage account can choose Premium. Known - values are: "TransactionOptimized", "Hot", "Cool", and "Premium". - :paramtype access_tier: str or ~azure.mgmt.storage.models.ShareAccessTier - :keyword signed_identifiers: List of stored access policies specified on the share. - :paramtype signed_identifiers: list[~azure.mgmt.storage.models.SignedIdentifier] - :keyword file_share_paid_bursting: File Share Paid Bursting properties. - :paramtype file_share_paid_bursting: - ~azure.mgmt.storage.models.FileSharePropertiesFileSharePaidBursting - """ - super().__init__(**kwargs) - self.last_modified_time: Optional[datetime.datetime] = None - self.metadata = metadata - self.share_quota = share_quota - self.provisioned_iops = provisioned_iops - self.provisioned_bandwidth_mibps = provisioned_bandwidth_mibps - self.included_burst_iops: Optional[int] = None - self.max_burst_credits_for_iops: Optional[int] = None - self.next_allowed_quota_downgrade_time: Optional[datetime.datetime] = None - self.next_allowed_provisioned_iops_downgrade_time: Optional[datetime.datetime] = None - self.next_allowed_provisioned_bandwidth_downgrade_time: Optional[datetime.datetime] = None - self.enabled_protocols = enabled_protocols - self.root_squash = root_squash - self.version: Optional[str] = None - self.deleted: Optional[bool] = None - self.deleted_time: Optional[datetime.datetime] = None - self.remaining_retention_days: Optional[int] = None - self.access_tier = access_tier - self.access_tier_change_time: Optional[datetime.datetime] = None - self.access_tier_status: Optional[str] = None - self.share_usage_bytes: Optional[int] = None - self.lease_status: Optional[Union[str, "_models.LeaseStatus"]] = None - self.lease_state: Optional[Union[str, "_models.LeaseState"]] = None - self.lease_duration: Optional[Union[str, "_models.LeaseDuration"]] = None - self.signed_identifiers = signed_identifiers - self.snapshot_time: Optional[datetime.datetime] = None - self.file_share_paid_bursting = 
file_share_paid_bursting - - -class FileShareItems(_serialization.Model): - """Response schema. Contains list of shares returned, and if paging is requested or required, a - URL to next page of shares. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of file shares returned. - :vartype value: list[~azure.mgmt.storage.models.FileShareItem] - :ivar next_link: Request URL that can be used to query next page of shares. Returned when total - number of requested shares exceed maximum page size. - :vartype next_link: str - """ - - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[FileShareItem]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.FileShareItem"]] = None - self.next_link: Optional[str] = None - - -class FileShareLimits(_serialization.Model): - """Minimum and maximum provisioned storage, IOPS and bandwidth limits for a file share in the - storage account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar min_provisioned_storage_gi_b: The minimum provisioned storage quota limit in gibibytes - for a file share in the storage account. - :vartype min_provisioned_storage_gi_b: int - :ivar max_provisioned_storage_gi_b: The maximum provisioned storage quota limit in gibibytes - for a file share in the storage account. - :vartype max_provisioned_storage_gi_b: int - :ivar min_provisioned_iops: The minimum provisioned IOPS limit for a file share in the storage - account. - :vartype min_provisioned_iops: int - :ivar max_provisioned_iops: The maximum provisioned IOPS limit for a file share in the storage - account. 
- :vartype max_provisioned_iops: int - :ivar min_provisioned_bandwidth_mi_b_per_sec: The minimum provisioned bandwidth limit in - mebibytes per second for a file share in the storage account. - :vartype min_provisioned_bandwidth_mi_b_per_sec: int - :ivar max_provisioned_bandwidth_mi_b_per_sec: The maximum provisioned bandwidth limit in - mebibytes per second for a file share in the storage account. - :vartype max_provisioned_bandwidth_mi_b_per_sec: int - """ - - _validation = { - "min_provisioned_storage_gi_b": {"readonly": True}, - "max_provisioned_storage_gi_b": {"readonly": True}, - "min_provisioned_iops": {"readonly": True}, - "max_provisioned_iops": {"readonly": True}, - "min_provisioned_bandwidth_mi_b_per_sec": {"readonly": True}, - "max_provisioned_bandwidth_mi_b_per_sec": {"readonly": True}, - } - - _attribute_map = { - "min_provisioned_storage_gi_b": {"key": "minProvisionedStorageGiB", "type": "int"}, - "max_provisioned_storage_gi_b": {"key": "maxProvisionedStorageGiB", "type": "int"}, - "min_provisioned_iops": {"key": "minProvisionedIOPS", "type": "int"}, - "max_provisioned_iops": {"key": "maxProvisionedIOPS", "type": "int"}, - "min_provisioned_bandwidth_mi_b_per_sec": {"key": "minProvisionedBandwidthMiBPerSec", "type": "int"}, - "max_provisioned_bandwidth_mi_b_per_sec": {"key": "maxProvisionedBandwidthMiBPerSec", "type": "int"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.min_provisioned_storage_gi_b: Optional[int] = None - self.max_provisioned_storage_gi_b: Optional[int] = None - self.min_provisioned_iops: Optional[int] = None - self.max_provisioned_iops: Optional[int] = None - self.min_provisioned_bandwidth_mi_b_per_sec: Optional[int] = None - self.max_provisioned_bandwidth_mi_b_per_sec: Optional[int] = None - - -class FileSharePropertiesFileSharePaidBursting(_serialization.Model): - """File Share Paid Bursting properties. 
- - :ivar paid_bursting_enabled: Indicates whether paid bursting is enabled for the share. This - property is only for file shares created under Files Provisioned v1 SSD account type. - :vartype paid_bursting_enabled: bool - :ivar paid_bursting_max_iops: The maximum paid bursting IOPS for the share. This property is - only for file shares created under Files Provisioned v1 SSD account type. The maximum allowed - value is 102400 which is the maximum allowed IOPS for a share. - :vartype paid_bursting_max_iops: int - :ivar paid_bursting_max_bandwidth_mibps: The maximum paid bursting bandwidth for the share, in - mebibytes per second. This property is only for file shares created under Files Provisioned v1 - SSD account type. The maximum allowed value is 10340 which is the maximum allowed bandwidth for - a share. - :vartype paid_bursting_max_bandwidth_mibps: int - """ - - _attribute_map = { - "paid_bursting_enabled": {"key": "paidBurstingEnabled", "type": "bool"}, - "paid_bursting_max_iops": {"key": "paidBurstingMaxIops", "type": "int"}, - "paid_bursting_max_bandwidth_mibps": {"key": "paidBurstingMaxBandwidthMibps", "type": "int"}, - } - - def __init__( - self, - *, - paid_bursting_enabled: Optional[bool] = None, - paid_bursting_max_iops: Optional[int] = None, - paid_bursting_max_bandwidth_mibps: Optional[int] = None, - **kwargs: Any - ) -> None: - """ - :keyword paid_bursting_enabled: Indicates whether paid bursting is enabled for the share. This - property is only for file shares created under Files Provisioned v1 SSD account type. - :paramtype paid_bursting_enabled: bool - :keyword paid_bursting_max_iops: The maximum paid bursting IOPS for the share. This property is - only for file shares created under Files Provisioned v1 SSD account type. The maximum allowed - value is 102400 which is the maximum allowed IOPS for a share. 
- :paramtype paid_bursting_max_iops: int - :keyword paid_bursting_max_bandwidth_mibps: The maximum paid bursting bandwidth for the share, - in mebibytes per second. This property is only for file shares created under Files Provisioned - v1 SSD account type. The maximum allowed value is 10340 which is the maximum allowed bandwidth - for a share. - :paramtype paid_bursting_max_bandwidth_mibps: int - """ - super().__init__(**kwargs) - self.paid_bursting_enabled = paid_bursting_enabled - self.paid_bursting_max_iops = paid_bursting_max_iops - self.paid_bursting_max_bandwidth_mibps = paid_bursting_max_bandwidth_mibps - - -class FileShareRecommendations(_serialization.Model): - """Constants used for calculating recommended provisioned IOPS and bandwidth for a file share in - the storage account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar base_iops: The base IOPS in the file share provisioned IOPS recommendation formula. - :vartype base_iops: int - :ivar io_scalar: The scalar for IO in the file share provisioned IOPS recommendation formula. - :vartype io_scalar: float - :ivar base_bandwidth_mi_b_per_sec: The base bandwidth in the file share provisioned bandwidth - recommendation formula. - :vartype base_bandwidth_mi_b_per_sec: int - :ivar bandwidth_scalar: The scalar for bandwidth in the file share provisioned bandwidth - recommendation formula. 
- :vartype bandwidth_scalar: float - """ - - _validation = { - "base_iops": {"readonly": True}, - "io_scalar": {"readonly": True}, - "base_bandwidth_mi_b_per_sec": {"readonly": True}, - "bandwidth_scalar": {"readonly": True}, - } - - _attribute_map = { - "base_iops": {"key": "baseIOPS", "type": "int"}, - "io_scalar": {"key": "ioScalar", "type": "float"}, - "base_bandwidth_mi_b_per_sec": {"key": "baseBandwidthMiBPerSec", "type": "int"}, - "bandwidth_scalar": {"key": "bandwidthScalar", "type": "float"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.base_iops: Optional[int] = None - self.io_scalar: Optional[float] = None - self.base_bandwidth_mi_b_per_sec: Optional[int] = None - self.bandwidth_scalar: Optional[float] = None - - -class GeoPriorityReplicationStatus(_serialization.Model): - """Geo Priority Replication enablement status for the storage account. - - :ivar is_blob_enabled: Indicates whether Blob Geo Priority Replication is enabled for the - storage account. - :vartype is_blob_enabled: bool - """ - - _attribute_map = { - "is_blob_enabled": {"key": "isBlobEnabled", "type": "bool"}, - } - - def __init__(self, *, is_blob_enabled: Optional[bool] = None, **kwargs: Any) -> None: - """ - :keyword is_blob_enabled: Indicates whether Blob Geo Priority Replication is enabled for the - storage account. - :paramtype is_blob_enabled: bool - """ - super().__init__(**kwargs) - self.is_blob_enabled = is_blob_enabled - - -class GeoReplicationStats(_serialization.Model): - """Statistics related to replication for storage account's Blob, Table, Queue and File services. - It is only available when geo-redundant replication is enabled for the storage account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar status: The status of the secondary location. Possible values are: - Live: Indicates that - the secondary location is active and operational. 
- Bootstrap: Indicates initial - synchronization from the primary location to the secondary location is in progress.This - typically occurs when replication is first enabled. - Unavailable: Indicates that the secondary - location is temporarily unavailable. Known values are: "Live", "Bootstrap", and "Unavailable". - :vartype status: str or ~azure.mgmt.storage.models.GeoReplicationStatus - :ivar last_sync_time: All primary writes preceding this UTC date/time value are guaranteed to - be available for read operations. Primary writes following this point in time may or may not be - available for reads. Element may be default value if value of LastSyncTime is not available, - this can happen if secondary is offline or we are in bootstrap. - :vartype last_sync_time: ~datetime.datetime - :ivar can_failover: A boolean flag which indicates whether or not account failover is supported - for the account. - :vartype can_failover: bool - :ivar can_planned_failover: A boolean flag which indicates whether or not planned account - failover is supported for the account. - :vartype can_planned_failover: bool - :ivar post_failover_redundancy: The redundancy type of the account after an account failover is - performed. Known values are: "Standard_LRS" and "Standard_ZRS". - :vartype post_failover_redundancy: str or ~azure.mgmt.storage.models.PostFailoverRedundancy - :ivar post_planned_failover_redundancy: The redundancy type of the account after a planned - account failover is performed. Known values are: "Standard_GRS", "Standard_GZRS", - "Standard_RAGRS", and "Standard_RAGZRS". 
- :vartype post_planned_failover_redundancy: str or - ~azure.mgmt.storage.models.PostPlannedFailoverRedundancy - """ - - _validation = { - "status": {"readonly": True}, - "last_sync_time": {"readonly": True}, - "can_failover": {"readonly": True}, - "can_planned_failover": {"readonly": True}, - "post_failover_redundancy": {"readonly": True}, - "post_planned_failover_redundancy": {"readonly": True}, - } - - _attribute_map = { - "status": {"key": "status", "type": "str"}, - "last_sync_time": {"key": "lastSyncTime", "type": "iso-8601"}, - "can_failover": {"key": "canFailover", "type": "bool"}, - "can_planned_failover": {"key": "canPlannedFailover", "type": "bool"}, - "post_failover_redundancy": {"key": "postFailoverRedundancy", "type": "str"}, - "post_planned_failover_redundancy": {"key": "postPlannedFailoverRedundancy", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.status: Optional[Union[str, "_models.GeoReplicationStatus"]] = None - self.last_sync_time: Optional[datetime.datetime] = None - self.can_failover: Optional[bool] = None - self.can_planned_failover: Optional[bool] = None - self.post_failover_redundancy: Optional[Union[str, "_models.PostFailoverRedundancy"]] = None - self.post_planned_failover_redundancy: Optional[Union[str, "_models.PostPlannedFailoverRedundancy"]] = None - - -class Identity(_serialization.Model): - """Identity for the resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar principal_id: The principal ID of resource identity. - :vartype principal_id: str - :ivar tenant_id: The tenant ID of resource. - :vartype tenant_id: str - :ivar type: The identity type. Required. Known values are: "None", "SystemAssigned", - "UserAssigned", and "SystemAssigned,UserAssigned". 
- :vartype type: str or ~azure.mgmt.storage.models.IdentityType - :ivar user_assigned_identities: Gets or sets a list of key value pairs that describe the set of - User Assigned identities that will be used with this storage account. The key is the ARM - resource identifier of the identity. Only 1 User Assigned identity is permitted here. - :vartype user_assigned_identities: dict[str, ~azure.mgmt.storage.models.UserAssignedIdentity] - """ - - _validation = { - "principal_id": {"readonly": True}, - "tenant_id": {"readonly": True}, - "type": {"required": True}, - } - - _attribute_map = { - "principal_id": {"key": "principalId", "type": "str"}, - "tenant_id": {"key": "tenantId", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserAssignedIdentity}"}, - } - - def __init__( - self, - *, - type: Union[str, "_models.IdentityType"], - user_assigned_identities: Optional[dict[str, "_models.UserAssignedIdentity"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword type: The identity type. Required. Known values are: "None", "SystemAssigned", - "UserAssigned", and "SystemAssigned,UserAssigned". - :paramtype type: str or ~azure.mgmt.storage.models.IdentityType - :keyword user_assigned_identities: Gets or sets a list of key value pairs that describe the set - of User Assigned identities that will be used with this storage account. The key is the ARM - resource identifier of the identity. Only 1 User Assigned identity is permitted here. - :paramtype user_assigned_identities: dict[str, ~azure.mgmt.storage.models.UserAssignedIdentity] - """ - super().__init__(**kwargs) - self.principal_id: Optional[str] = None - self.tenant_id: Optional[str] = None - self.type = type - self.user_assigned_identities = user_assigned_identities - - -class ImmutabilityPolicy(AzureEntityResource): - """The ImmutabilityPolicy property of a blob container, including Id, resource name, resource - type, Etag. 
- - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar etag: Resource Etag. - :vartype etag: str - :ivar immutability_period_since_creation_in_days: The immutability period for the blobs in the - container since the policy creation, in days. - :vartype immutability_period_since_creation_in_days: int - :ivar state: The ImmutabilityPolicy state of a blob container, possible values include: Locked - and Unlocked. Known values are: "Locked" and "Unlocked". - :vartype state: str or ~azure.mgmt.storage.models.ImmutabilityPolicyState - :ivar allow_protected_append_writes: This property can only be changed for unlocked time-based - retention policies. When enabled, new blocks can be written to an append blob while maintaining - immutability protection and compliance. Only new blocks can be added and any existing blocks - cannot be modified or deleted. This property cannot be changed with ExtendImmutabilityPolicy - API. - :vartype allow_protected_append_writes: bool - :ivar allow_protected_append_writes_all: This property can only be changed for unlocked - time-based retention policies. When enabled, new blocks can be written to both 'Append and Bock - Blobs' while maintaining immutability protection and compliance. Only new blocks can be added - and any existing blocks cannot be modified or deleted. This property cannot be changed with - ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and - 'allowProtectedAppendWritesAll' properties are mutually exclusive. 
- :vartype allow_protected_append_writes_all: bool - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "etag": {"readonly": True}, - "state": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "etag": {"key": "etag", "type": "str"}, - "immutability_period_since_creation_in_days": { - "key": "properties.immutabilityPeriodSinceCreationInDays", - "type": "int", - }, - "state": {"key": "properties.state", "type": "str"}, - "allow_protected_append_writes": {"key": "properties.allowProtectedAppendWrites", "type": "bool"}, - "allow_protected_append_writes_all": {"key": "properties.allowProtectedAppendWritesAll", "type": "bool"}, - } - - def __init__( - self, - *, - immutability_period_since_creation_in_days: Optional[int] = None, - allow_protected_append_writes: Optional[bool] = None, - allow_protected_append_writes_all: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword immutability_period_since_creation_in_days: The immutability period for the blobs in - the container since the policy creation, in days. - :paramtype immutability_period_since_creation_in_days: int - :keyword allow_protected_append_writes: This property can only be changed for unlocked - time-based retention policies. When enabled, new blocks can be written to an append blob while - maintaining immutability protection and compliance. Only new blocks can be added and any - existing blocks cannot be modified or deleted. This property cannot be changed with - ExtendImmutabilityPolicy API. - :paramtype allow_protected_append_writes: bool - :keyword allow_protected_append_writes_all: This property can only be changed for unlocked - time-based retention policies. When enabled, new blocks can be written to both 'Append and Bock - Blobs' while maintaining immutability protection and compliance. 
Only new blocks can be added - and any existing blocks cannot be modified or deleted. This property cannot be changed with - ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and - 'allowProtectedAppendWritesAll' properties are mutually exclusive. - :paramtype allow_protected_append_writes_all: bool - """ - super().__init__(**kwargs) - self.immutability_period_since_creation_in_days = immutability_period_since_creation_in_days - self.state: Optional[Union[str, "_models.ImmutabilityPolicyState"]] = None - self.allow_protected_append_writes = allow_protected_append_writes - self.allow_protected_append_writes_all = allow_protected_append_writes_all - - -class ImmutabilityPolicyProperties(_serialization.Model): - """The properties of an ImmutabilityPolicy of a blob container. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar etag: ImmutabilityPolicy Etag. - :vartype etag: str - :ivar update_history: The ImmutabilityPolicy update history of the blob container. - :vartype update_history: list[~azure.mgmt.storage.models.UpdateHistoryProperty] - :ivar immutability_period_since_creation_in_days: The immutability period for the blobs in the - container since the policy creation, in days. - :vartype immutability_period_since_creation_in_days: int - :ivar state: The ImmutabilityPolicy state of a blob container, possible values include: Locked - and Unlocked. Known values are: "Locked" and "Unlocked". - :vartype state: str or ~azure.mgmt.storage.models.ImmutabilityPolicyState - :ivar allow_protected_append_writes: This property can only be changed for unlocked time-based - retention policies. When enabled, new blocks can be written to an append blob while maintaining - immutability protection and compliance. Only new blocks can be added and any existing blocks - cannot be modified or deleted. This property cannot be changed with ExtendImmutabilityPolicy - API. 
- :vartype allow_protected_append_writes: bool - :ivar allow_protected_append_writes_all: This property can only be changed for unlocked - time-based retention policies. When enabled, new blocks can be written to both 'Append and Bock - Blobs' while maintaining immutability protection and compliance. Only new blocks can be added - and any existing blocks cannot be modified or deleted. This property cannot be changed with - ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and - 'allowProtectedAppendWritesAll' properties are mutually exclusive. - :vartype allow_protected_append_writes_all: bool - """ - - _validation = { - "etag": {"readonly": True}, - "update_history": {"readonly": True}, - "state": {"readonly": True}, - } - - _attribute_map = { - "etag": {"key": "etag", "type": "str"}, - "update_history": {"key": "updateHistory", "type": "[UpdateHistoryProperty]"}, - "immutability_period_since_creation_in_days": { - "key": "properties.immutabilityPeriodSinceCreationInDays", - "type": "int", - }, - "state": {"key": "properties.state", "type": "str"}, - "allow_protected_append_writes": {"key": "properties.allowProtectedAppendWrites", "type": "bool"}, - "allow_protected_append_writes_all": {"key": "properties.allowProtectedAppendWritesAll", "type": "bool"}, - } - - def __init__( - self, - *, - immutability_period_since_creation_in_days: Optional[int] = None, - allow_protected_append_writes: Optional[bool] = None, - allow_protected_append_writes_all: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword immutability_period_since_creation_in_days: The immutability period for the blobs in - the container since the policy creation, in days. - :paramtype immutability_period_since_creation_in_days: int - :keyword allow_protected_append_writes: This property can only be changed for unlocked - time-based retention policies. When enabled, new blocks can be written to an append blob while - maintaining immutability protection and compliance. 
Only new blocks can be added and any - existing blocks cannot be modified or deleted. This property cannot be changed with - ExtendImmutabilityPolicy API. - :paramtype allow_protected_append_writes: bool - :keyword allow_protected_append_writes_all: This property can only be changed for unlocked - time-based retention policies. When enabled, new blocks can be written to both 'Append and Bock - Blobs' while maintaining immutability protection and compliance. Only new blocks can be added - and any existing blocks cannot be modified or deleted. This property cannot be changed with - ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and - 'allowProtectedAppendWritesAll' properties are mutually exclusive. - :paramtype allow_protected_append_writes_all: bool - """ - super().__init__(**kwargs) - self.etag: Optional[str] = None - self.update_history: Optional[list["_models.UpdateHistoryProperty"]] = None - self.immutability_period_since_creation_in_days = immutability_period_since_creation_in_days - self.state: Optional[Union[str, "_models.ImmutabilityPolicyState"]] = None - self.allow_protected_append_writes = allow_protected_append_writes - self.allow_protected_append_writes_all = allow_protected_append_writes_all - - -class ImmutableStorageAccount(_serialization.Model): - """This property enables and defines account-level immutability. Enabling the feature auto-enables - Blob Versioning. - - :ivar enabled: A boolean flag which enables account-level immutability. All the containers - under such an account have object-level immutability enabled by default. - :vartype enabled: bool - :ivar immutability_policy: Specifies the default account-level immutability policy which is - inherited and applied to objects that do not possess an explicit immutability policy at the - object level. 
The object-level immutability policy has higher precedence than the - container-level immutability policy, which has a higher precedence than the account-level - immutability policy. - :vartype immutability_policy: ~azure.mgmt.storage.models.AccountImmutabilityPolicyProperties - """ - - _attribute_map = { - "enabled": {"key": "enabled", "type": "bool"}, - "immutability_policy": {"key": "immutabilityPolicy", "type": "AccountImmutabilityPolicyProperties"}, - } - - def __init__( - self, - *, - enabled: Optional[bool] = None, - immutability_policy: Optional["_models.AccountImmutabilityPolicyProperties"] = None, - **kwargs: Any - ) -> None: - """ - :keyword enabled: A boolean flag which enables account-level immutability. All the containers - under such an account have object-level immutability enabled by default. - :paramtype enabled: bool - :keyword immutability_policy: Specifies the default account-level immutability policy which is - inherited and applied to objects that do not possess an explicit immutability policy at the - object level. The object-level immutability policy has higher precedence than the - container-level immutability policy, which has a higher precedence than the account-level - immutability policy. - :paramtype immutability_policy: ~azure.mgmt.storage.models.AccountImmutabilityPolicyProperties - """ - super().__init__(**kwargs) - self.enabled = enabled - self.immutability_policy = immutability_policy - - -class ImmutableStorageWithVersioning(_serialization.Model): - """Object level immutability properties of the container. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar enabled: This is an immutable property, when set to true it enables object level - immutability at the container level. - :vartype enabled: bool - :ivar time_stamp: Returns the date and time the object level immutability was enabled. 
- :vartype time_stamp: ~datetime.datetime - :ivar migration_state: This property denotes the container level immutability to object level - immutability migration state. Known values are: "InProgress" and "Completed". - :vartype migration_state: str or ~azure.mgmt.storage.models.MigrationState - """ - - _validation = { - "time_stamp": {"readonly": True}, - "migration_state": {"readonly": True}, - } - - _attribute_map = { - "enabled": {"key": "enabled", "type": "bool"}, - "time_stamp": {"key": "timeStamp", "type": "iso-8601"}, - "migration_state": {"key": "migrationState", "type": "str"}, - } - - def __init__(self, *, enabled: Optional[bool] = None, **kwargs: Any) -> None: - """ - :keyword enabled: This is an immutable property, when set to true it enables object level - immutability at the container level. - :paramtype enabled: bool - """ - super().__init__(**kwargs) - self.enabled = enabled - self.time_stamp: Optional[datetime.datetime] = None - self.migration_state: Optional[Union[str, "_models.MigrationState"]] = None - - -class IPRule(_serialization.Model): - """IP rule with specific IP or IP range in CIDR format. - - All required parameters must be populated in order to send to server. - - :ivar ip_address_or_range: Specifies the IP or IP range in CIDR format. Required. - :vartype ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value is "Allow". - :vartype action: str - """ - - _validation = { - "ip_address_or_range": {"required": True}, - } - - _attribute_map = { - "ip_address_or_range": {"key": "value", "type": "str"}, - "action": {"key": "action", "type": "str"}, - } - - def __init__(self, *, ip_address_or_range: str, action: Optional[Literal["Allow"]] = None, **kwargs: Any) -> None: - """ - :keyword ip_address_or_range: Specifies the IP or IP range in CIDR format. Required. - :paramtype ip_address_or_range: str - :keyword action: The action of IP ACL rule. Default value is "Allow". 
- :paramtype action: str - """ - super().__init__(**kwargs) - self.ip_address_or_range = ip_address_or_range - self.action = action - - -class KeyCreationTime(_serialization.Model): - """Storage account keys creation time. - - :ivar key1: - :vartype key1: ~datetime.datetime - :ivar key2: - :vartype key2: ~datetime.datetime - """ - - _attribute_map = { - "key1": {"key": "key1", "type": "iso-8601"}, - "key2": {"key": "key2", "type": "iso-8601"}, - } - - def __init__( - self, *, key1: Optional[datetime.datetime] = None, key2: Optional[datetime.datetime] = None, **kwargs: Any - ) -> None: - """ - :keyword key1: - :paramtype key1: ~datetime.datetime - :keyword key2: - :paramtype key2: ~datetime.datetime - """ - super().__init__(**kwargs) - self.key1 = key1 - self.key2 = key2 - - -class KeyPolicy(_serialization.Model): - """KeyPolicy assigned to the storage account. - - All required parameters must be populated in order to send to server. - - :ivar key_expiration_period_in_days: The key expiration period in days. Required. - :vartype key_expiration_period_in_days: int - """ - - _validation = { - "key_expiration_period_in_days": {"required": True}, - } - - _attribute_map = { - "key_expiration_period_in_days": {"key": "keyExpirationPeriodInDays", "type": "int"}, - } - - def __init__(self, *, key_expiration_period_in_days: int, **kwargs: Any) -> None: - """ - :keyword key_expiration_period_in_days: The key expiration period in days. Required. - :paramtype key_expiration_period_in_days: int - """ - super().__init__(**kwargs) - self.key_expiration_period_in_days = key_expiration_period_in_days - - -class KeyVaultProperties(_serialization.Model): - """Properties of key vault. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar key_name: The name of KeyVault key. - :vartype key_name: str - :ivar key_version: The version of KeyVault key. - :vartype key_version: str - :ivar key_vault_uri: The Uri of KeyVault. 
- :vartype key_vault_uri: str - :ivar current_versioned_key_identifier: The object identifier of the current versioned Key - Vault Key in use. - :vartype current_versioned_key_identifier: str - :ivar last_key_rotation_timestamp: Timestamp of last rotation of the Key Vault Key. - :vartype last_key_rotation_timestamp: ~datetime.datetime - :ivar current_versioned_key_expiration_timestamp: This is a read only property that represents - the expiration time of the current version of the customer managed key used for encryption. - :vartype current_versioned_key_expiration_timestamp: ~datetime.datetime - """ - - _validation = { - "current_versioned_key_identifier": {"readonly": True}, - "last_key_rotation_timestamp": {"readonly": True}, - "current_versioned_key_expiration_timestamp": {"readonly": True}, - } - - _attribute_map = { - "key_name": {"key": "keyname", "type": "str"}, - "key_version": {"key": "keyversion", "type": "str"}, - "key_vault_uri": {"key": "keyvaulturi", "type": "str"}, - "current_versioned_key_identifier": {"key": "currentVersionedKeyIdentifier", "type": "str"}, - "last_key_rotation_timestamp": {"key": "lastKeyRotationTimestamp", "type": "iso-8601"}, - "current_versioned_key_expiration_timestamp": { - "key": "currentVersionedKeyExpirationTimestamp", - "type": "iso-8601", - }, - } - - def __init__( - self, - *, - key_name: Optional[str] = None, - key_version: Optional[str] = None, - key_vault_uri: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword key_name: The name of KeyVault key. - :paramtype key_name: str - :keyword key_version: The version of KeyVault key. - :paramtype key_version: str - :keyword key_vault_uri: The Uri of KeyVault. 
- :paramtype key_vault_uri: str - """ - super().__init__(**kwargs) - self.key_name = key_name - self.key_version = key_version - self.key_vault_uri = key_vault_uri - self.current_versioned_key_identifier: Optional[str] = None - self.last_key_rotation_timestamp: Optional[datetime.datetime] = None - self.current_versioned_key_expiration_timestamp: Optional[datetime.datetime] = None - - -class LastAccessTimeTrackingPolicy(_serialization.Model): - """The blob service properties for Last access time based tracking policy. - - All required parameters must be populated in order to send to server. - - :ivar enable: When set to true last access time based tracking is enabled. Required. - :vartype enable: bool - :ivar name: Name of the policy. The valid value is AccessTimeTracking. This field is currently - read only. "AccessTimeTracking" - :vartype name: str or ~azure.mgmt.storage.models.Name - :ivar tracking_granularity_in_days: The field specifies blob object tracking granularity in - days, typically how often the blob object should be tracked.This field is currently read only - with value as 1. - :vartype tracking_granularity_in_days: int - :ivar blob_type: An array of predefined supported blob types. Only blockBlob is the supported - value. This field is currently read only. - :vartype blob_type: list[str] - """ - - _validation = { - "enable": {"required": True}, - } - - _attribute_map = { - "enable": {"key": "enable", "type": "bool"}, - "name": {"key": "name", "type": "str"}, - "tracking_granularity_in_days": {"key": "trackingGranularityInDays", "type": "int"}, - "blob_type": {"key": "blobType", "type": "[str]"}, - } - - def __init__( - self, - *, - enable: bool, - name: Optional[Union[str, "_models.Name"]] = None, - tracking_granularity_in_days: Optional[int] = None, - blob_type: Optional[list[str]] = None, - **kwargs: Any - ) -> None: - """ - :keyword enable: When set to true last access time based tracking is enabled. Required. 
- :paramtype enable: bool - :keyword name: Name of the policy. The valid value is AccessTimeTracking. This field is - currently read only. "AccessTimeTracking" - :paramtype name: str or ~azure.mgmt.storage.models.Name - :keyword tracking_granularity_in_days: The field specifies blob object tracking granularity in - days, typically how often the blob object should be tracked.This field is currently read only - with value as 1. - :paramtype tracking_granularity_in_days: int - :keyword blob_type: An array of predefined supported blob types. Only blockBlob is the - supported value. This field is currently read only. - :paramtype blob_type: list[str] - """ - super().__init__(**kwargs) - self.enable = enable - self.name = name - self.tracking_granularity_in_days = tracking_granularity_in_days - self.blob_type = blob_type - - -class LeaseContainerRequest(_serialization.Model): - """Lease Container request schema. - - All required parameters must be populated in order to send to server. - - :ivar action: Specifies the lease action. Can be one of the available actions. Required. Known - values are: "Acquire", "Renew", "Change", "Release", and "Break". - :vartype action: str or ~azure.mgmt.storage.models.LeaseContainerRequestEnum - :ivar lease_id: Identifies the lease. Can be specified in any valid GUID string format. - :vartype lease_id: str - :ivar break_period: Optional. For a break action, proposed duration the lease should continue - before it is broken, in seconds, between 0 and 60. - :vartype break_period: int - :ivar lease_duration: Required for acquire. Specifies the duration of the lease, in seconds, or - negative one (-1) for a lease that never expires. - :vartype lease_duration: int - :ivar proposed_lease_id: Optional for acquire, required for change. Proposed lease ID, in a - GUID string format. 
- :vartype proposed_lease_id: str - """ - - _validation = { - "action": {"required": True}, - } - - _attribute_map = { - "action": {"key": "action", "type": "str"}, - "lease_id": {"key": "leaseId", "type": "str"}, - "break_period": {"key": "breakPeriod", "type": "int"}, - "lease_duration": {"key": "leaseDuration", "type": "int"}, - "proposed_lease_id": {"key": "proposedLeaseId", "type": "str"}, - } - - def __init__( - self, - *, - action: Union[str, "_models.LeaseContainerRequestEnum"], - lease_id: Optional[str] = None, - break_period: Optional[int] = None, - lease_duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword action: Specifies the lease action. Can be one of the available actions. Required. - Known values are: "Acquire", "Renew", "Change", "Release", and "Break". - :paramtype action: str or ~azure.mgmt.storage.models.LeaseContainerRequestEnum - :keyword lease_id: Identifies the lease. Can be specified in any valid GUID string format. - :paramtype lease_id: str - :keyword break_period: Optional. For a break action, proposed duration the lease should - continue before it is broken, in seconds, between 0 and 60. - :paramtype break_period: int - :keyword lease_duration: Required for acquire. Specifies the duration of the lease, in seconds, - or negative one (-1) for a lease that never expires. - :paramtype lease_duration: int - :keyword proposed_lease_id: Optional for acquire, required for change. Proposed lease ID, in a - GUID string format. - :paramtype proposed_lease_id: str - """ - super().__init__(**kwargs) - self.action = action - self.lease_id = lease_id - self.break_period = break_period - self.lease_duration = lease_duration - self.proposed_lease_id = proposed_lease_id - - -class LeaseContainerResponse(_serialization.Model): - """Lease Container response schema. 
- - :ivar lease_id: Returned unique lease ID that must be included with any request to delete the - container, or to renew, change, or release the lease. - :vartype lease_id: str - :ivar lease_time_seconds: Approximate time remaining in the lease period, in seconds. - :vartype lease_time_seconds: str - """ - - _attribute_map = { - "lease_id": {"key": "leaseId", "type": "str"}, - "lease_time_seconds": {"key": "leaseTimeSeconds", "type": "str"}, - } - - def __init__( - self, *, lease_id: Optional[str] = None, lease_time_seconds: Optional[str] = None, **kwargs: Any - ) -> None: - """ - :keyword lease_id: Returned unique lease ID that must be included with any request to delete - the container, or to renew, change, or release the lease. - :paramtype lease_id: str - :keyword lease_time_seconds: Approximate time remaining in the lease period, in seconds. - :paramtype lease_time_seconds: str - """ - super().__init__(**kwargs) - self.lease_id = lease_id - self.lease_time_seconds = lease_time_seconds - - -class LeaseShareRequest(_serialization.Model): - """Lease Share request schema. - - All required parameters must be populated in order to send to server. - - :ivar action: Specifies the lease action. Can be one of the available actions. Required. Known - values are: "Acquire", "Renew", "Change", "Release", and "Break". - :vartype action: str or ~azure.mgmt.storage.models.LeaseShareAction - :ivar lease_id: Identifies the lease. Can be specified in any valid GUID string format. - :vartype lease_id: str - :ivar break_period: Optional. For a break action, proposed duration the lease should continue - before it is broken, in seconds, between 0 and 60. - :vartype break_period: int - :ivar lease_duration: Required for acquire. Specifies the duration of the lease, in seconds, or - negative one (-1) for a lease that never expires. - :vartype lease_duration: int - :ivar proposed_lease_id: Optional for acquire, required for change. Proposed lease ID, in a - GUID string format. 
- :vartype proposed_lease_id: str - """ - - _validation = { - "action": {"required": True}, - } - - _attribute_map = { - "action": {"key": "action", "type": "str"}, - "lease_id": {"key": "leaseId", "type": "str"}, - "break_period": {"key": "breakPeriod", "type": "int"}, - "lease_duration": {"key": "leaseDuration", "type": "int"}, - "proposed_lease_id": {"key": "proposedLeaseId", "type": "str"}, - } - - def __init__( - self, - *, - action: Union[str, "_models.LeaseShareAction"], - lease_id: Optional[str] = None, - break_period: Optional[int] = None, - lease_duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword action: Specifies the lease action. Can be one of the available actions. Required. - Known values are: "Acquire", "Renew", "Change", "Release", and "Break". - :paramtype action: str or ~azure.mgmt.storage.models.LeaseShareAction - :keyword lease_id: Identifies the lease. Can be specified in any valid GUID string format. - :paramtype lease_id: str - :keyword break_period: Optional. For a break action, proposed duration the lease should - continue before it is broken, in seconds, between 0 and 60. - :paramtype break_period: int - :keyword lease_duration: Required for acquire. Specifies the duration of the lease, in seconds, - or negative one (-1) for a lease that never expires. - :paramtype lease_duration: int - :keyword proposed_lease_id: Optional for acquire, required for change. Proposed lease ID, in a - GUID string format. - :paramtype proposed_lease_id: str - """ - super().__init__(**kwargs) - self.action = action - self.lease_id = lease_id - self.break_period = break_period - self.lease_duration = lease_duration - self.proposed_lease_id = proposed_lease_id - - -class LeaseShareResponse(_serialization.Model): - """Lease Share response schema. - - :ivar lease_id: Returned unique lease ID that must be included with any request to delete the - share, or to renew, change, or release the lease. 
- :vartype lease_id: str - :ivar lease_time_seconds: Approximate time remaining in the lease period, in seconds. - :vartype lease_time_seconds: str - """ - - _attribute_map = { - "lease_id": {"key": "leaseId", "type": "str"}, - "lease_time_seconds": {"key": "leaseTimeSeconds", "type": "str"}, - } - - def __init__( - self, *, lease_id: Optional[str] = None, lease_time_seconds: Optional[str] = None, **kwargs: Any - ) -> None: - """ - :keyword lease_id: Returned unique lease ID that must be included with any request to delete - the share, or to renew, change, or release the lease. - :paramtype lease_id: str - :keyword lease_time_seconds: Approximate time remaining in the lease period, in seconds. - :paramtype lease_time_seconds: str - """ - super().__init__(**kwargs) - self.lease_id = lease_id - self.lease_time_seconds = lease_time_seconds - - -class LegalHold(_serialization.Model): - """The LegalHold property of a blob container. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there are at - least one existing tag. The hasLegalHold public property is set to false by SRP if all existing - legal hold tags are cleared out. There can be a maximum of 1000 blob containers with - hasLegalHold=true for a given account. - :vartype has_legal_hold: bool - :ivar tags: Each tag should be 3 to 23 alphanumeric characters and is normalized to lower case - at SRP. Required. - :vartype tags: list[str] - :ivar allow_protected_append_writes_all: When enabled, new blocks can be written to both - 'Append and Bock Blobs' while maintaining legal hold protection and compliance. Only new blocks - can be added and any existing blocks cannot be modified or deleted. 
- :vartype allow_protected_append_writes_all: bool - """ - - _validation = { - "has_legal_hold": {"readonly": True}, - "tags": {"required": True}, - } - - _attribute_map = { - "has_legal_hold": {"key": "hasLegalHold", "type": "bool"}, - "tags": {"key": "tags", "type": "[str]"}, - "allow_protected_append_writes_all": {"key": "allowProtectedAppendWritesAll", "type": "bool"}, - } - - def __init__( - self, *, tags: list[str], allow_protected_append_writes_all: Optional[bool] = None, **kwargs: Any - ) -> None: - """ - :keyword tags: Each tag should be 3 to 23 alphanumeric characters and is normalized to lower - case at SRP. Required. - :paramtype tags: list[str] - :keyword allow_protected_append_writes_all: When enabled, new blocks can be written to both - 'Append and Bock Blobs' while maintaining legal hold protection and compliance. Only new blocks - can be added and any existing blocks cannot be modified or deleted. - :paramtype allow_protected_append_writes_all: bool - """ - super().__init__(**kwargs) - self.has_legal_hold: Optional[bool] = None - self.tags = tags - self.allow_protected_append_writes_all = allow_protected_append_writes_all - - -class LegalHoldProperties(_serialization.Model): - """The LegalHold property of a blob container. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there are at - least one existing tag. The hasLegalHold public property is set to false by SRP if all existing - legal hold tags are cleared out. There can be a maximum of 1000 blob containers with - hasLegalHold=true for a given account. - :vartype has_legal_hold: bool - :ivar tags: The list of LegalHold tags of a blob container. - :vartype tags: list[~azure.mgmt.storage.models.TagProperty] - :ivar protected_append_writes_history: Protected append blob writes history. 
- :vartype protected_append_writes_history: - ~azure.mgmt.storage.models.ProtectedAppendWritesHistory - """ - - _validation = { - "has_legal_hold": {"readonly": True}, - } - - _attribute_map = { - "has_legal_hold": {"key": "hasLegalHold", "type": "bool"}, - "tags": {"key": "tags", "type": "[TagProperty]"}, - "protected_append_writes_history": { - "key": "protectedAppendWritesHistory", - "type": "ProtectedAppendWritesHistory", - }, - } - - def __init__( - self, - *, - tags: Optional[list["_models.TagProperty"]] = None, - protected_append_writes_history: Optional["_models.ProtectedAppendWritesHistory"] = None, - **kwargs: Any - ) -> None: - """ - :keyword tags: The list of LegalHold tags of a blob container. - :paramtype tags: list[~azure.mgmt.storage.models.TagProperty] - :keyword protected_append_writes_history: Protected append blob writes history. - :paramtype protected_append_writes_history: - ~azure.mgmt.storage.models.ProtectedAppendWritesHistory - """ - super().__init__(**kwargs) - self.has_legal_hold: Optional[bool] = None - self.tags = tags - self.protected_append_writes_history = protected_append_writes_history - - -class ListAccountSasResponse(_serialization.Model): - """The List SAS credentials operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar account_sas_token: List SAS credentials of storage account. - :vartype account_sas_token: str - """ - - _validation = { - "account_sas_token": {"readonly": True}, - } - - _attribute_map = { - "account_sas_token": {"key": "accountSasToken", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.account_sas_token: Optional[str] = None - - -class ListBlobInventoryPolicy(_serialization.Model): - """List of blob inventory policies returned. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of blob inventory policies. 
- :vartype value: list[~azure.mgmt.storage.models.BlobInventoryPolicy] - """ - - _validation = { - "value": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[BlobInventoryPolicy]"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.BlobInventoryPolicy"]] = None - - -class ListContainerItem(AzureEntityResource): - """The blob container properties be listed out. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar etag: Resource Etag. - :vartype etag: str - :ivar version: The version of the deleted blob container. - :vartype version: str - :ivar deleted: Indicates whether the blob container was deleted. - :vartype deleted: bool - :ivar deleted_time: Blob container deletion time. - :vartype deleted_time: ~datetime.datetime - :ivar remaining_retention_days: Remaining retention days for soft deleted blob container. - :vartype remaining_retention_days: int - :ivar default_encryption_scope: Default the container to use specified encryption scope for all - writes. - :vartype default_encryption_scope: str - :ivar deny_encryption_scope_override: Block override of encryption scope from the container - default. - :vartype deny_encryption_scope_override: bool - :ivar public_access: Specifies whether data in the container may be accessed publicly and the - level of access. Known values are: "Container", "Blob", and "None". 
- :vartype public_access: str or ~azure.mgmt.storage.models.PublicAccess - :ivar last_modified_time: Returns the date and time the container was last modified. - :vartype last_modified_time: ~datetime.datetime - :ivar lease_status: The lease status of the container. Known values are: "Locked" and - "Unlocked". - :vartype lease_status: str or ~azure.mgmt.storage.models.LeaseStatus - :ivar lease_state: Lease state of the container. Known values are: "Available", "Leased", - "Expired", "Breaking", and "Broken". - :vartype lease_state: str or ~azure.mgmt.storage.models.LeaseState - :ivar lease_duration: Specifies whether the lease on a container is of infinite or fixed - duration, only when the container is leased. Known values are: "Infinite" and "Fixed". - :vartype lease_duration: str or ~azure.mgmt.storage.models.LeaseDuration - :ivar metadata: A name-value pair to associate with the container as metadata. - :vartype metadata: dict[str, str] - :ivar immutability_policy: The ImmutabilityPolicy property of the container. - :vartype immutability_policy: ~azure.mgmt.storage.models.ImmutabilityPolicyProperties - :ivar legal_hold: The LegalHold property of the container. - :vartype legal_hold: ~azure.mgmt.storage.models.LegalHoldProperties - :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there are at - least one existing tag. The hasLegalHold public property is set to false by SRP if all existing - legal hold tags are cleared out. There can be a maximum of 1000 blob containers with - hasLegalHold=true for a given account. - :vartype has_legal_hold: bool - :ivar has_immutability_policy: The hasImmutabilityPolicy public property is set to true by SRP - if ImmutabilityPolicy has been created for this container. The hasImmutabilityPolicy public - property is set to false by SRP if ImmutabilityPolicy has not been created for this container. 
- :vartype has_immutability_policy: bool - :ivar immutable_storage_with_versioning: The object level immutability property of the - container. The property is immutable and can only be set to true at the container creation - time. Existing containers must undergo a migration process. - :vartype immutable_storage_with_versioning: - ~azure.mgmt.storage.models.ImmutableStorageWithVersioning - :ivar enable_nfs_v3_root_squash: Enable NFSv3 root squash on blob container. - :vartype enable_nfs_v3_root_squash: bool - :ivar enable_nfs_v3_all_squash: Enable NFSv3 all squash on blob container. - :vartype enable_nfs_v3_all_squash: bool - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "etag": {"readonly": True}, - "version": {"readonly": True}, - "deleted": {"readonly": True}, - "deleted_time": {"readonly": True}, - "remaining_retention_days": {"readonly": True}, - "last_modified_time": {"readonly": True}, - "lease_status": {"readonly": True}, - "lease_state": {"readonly": True}, - "lease_duration": {"readonly": True}, - "immutability_policy": {"readonly": True}, - "legal_hold": {"readonly": True}, - "has_legal_hold": {"readonly": True}, - "has_immutability_policy": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "etag": {"key": "etag", "type": "str"}, - "version": {"key": "properties.version", "type": "str"}, - "deleted": {"key": "properties.deleted", "type": "bool"}, - "deleted_time": {"key": "properties.deletedTime", "type": "iso-8601"}, - "remaining_retention_days": {"key": "properties.remainingRetentionDays", "type": "int"}, - "default_encryption_scope": {"key": "properties.defaultEncryptionScope", "type": "str"}, - "deny_encryption_scope_override": {"key": "properties.denyEncryptionScopeOverride", "type": "bool"}, - "public_access": {"key": "properties.publicAccess", "type": "str"}, - 
"last_modified_time": {"key": "properties.lastModifiedTime", "type": "iso-8601"}, - "lease_status": {"key": "properties.leaseStatus", "type": "str"}, - "lease_state": {"key": "properties.leaseState", "type": "str"}, - "lease_duration": {"key": "properties.leaseDuration", "type": "str"}, - "metadata": {"key": "properties.metadata", "type": "{str}"}, - "immutability_policy": {"key": "properties.immutabilityPolicy", "type": "ImmutabilityPolicyProperties"}, - "legal_hold": {"key": "properties.legalHold", "type": "LegalHoldProperties"}, - "has_legal_hold": {"key": "properties.hasLegalHold", "type": "bool"}, - "has_immutability_policy": {"key": "properties.hasImmutabilityPolicy", "type": "bool"}, - "immutable_storage_with_versioning": { - "key": "properties.immutableStorageWithVersioning", - "type": "ImmutableStorageWithVersioning", - }, - "enable_nfs_v3_root_squash": {"key": "properties.enableNfsV3RootSquash", "type": "bool"}, - "enable_nfs_v3_all_squash": {"key": "properties.enableNfsV3AllSquash", "type": "bool"}, - } - - def __init__( - self, - *, - default_encryption_scope: Optional[str] = None, - deny_encryption_scope_override: Optional[bool] = None, - public_access: Optional[Union[str, "_models.PublicAccess"]] = None, - metadata: Optional[dict[str, str]] = None, - immutable_storage_with_versioning: Optional["_models.ImmutableStorageWithVersioning"] = None, - enable_nfs_v3_root_squash: Optional[bool] = None, - enable_nfs_v3_all_squash: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword default_encryption_scope: Default the container to use specified encryption scope for - all writes. - :paramtype default_encryption_scope: str - :keyword deny_encryption_scope_override: Block override of encryption scope from the container - default. - :paramtype deny_encryption_scope_override: bool - :keyword public_access: Specifies whether data in the container may be accessed publicly and - the level of access. Known values are: "Container", "Blob", and "None". 
- :paramtype public_access: str or ~azure.mgmt.storage.models.PublicAccess - :keyword metadata: A name-value pair to associate with the container as metadata. - :paramtype metadata: dict[str, str] - :keyword immutable_storage_with_versioning: The object level immutability property of the - container. The property is immutable and can only be set to true at the container creation - time. Existing containers must undergo a migration process. - :paramtype immutable_storage_with_versioning: - ~azure.mgmt.storage.models.ImmutableStorageWithVersioning - :keyword enable_nfs_v3_root_squash: Enable NFSv3 root squash on blob container. - :paramtype enable_nfs_v3_root_squash: bool - :keyword enable_nfs_v3_all_squash: Enable NFSv3 all squash on blob container. - :paramtype enable_nfs_v3_all_squash: bool - """ - super().__init__(**kwargs) - self.version: Optional[str] = None - self.deleted: Optional[bool] = None - self.deleted_time: Optional[datetime.datetime] = None - self.remaining_retention_days: Optional[int] = None - self.default_encryption_scope = default_encryption_scope - self.deny_encryption_scope_override = deny_encryption_scope_override - self.public_access = public_access - self.last_modified_time: Optional[datetime.datetime] = None - self.lease_status: Optional[Union[str, "_models.LeaseStatus"]] = None - self.lease_state: Optional[Union[str, "_models.LeaseState"]] = None - self.lease_duration: Optional[Union[str, "_models.LeaseDuration"]] = None - self.metadata = metadata - self.immutability_policy: Optional["_models.ImmutabilityPolicyProperties"] = None - self.legal_hold: Optional["_models.LegalHoldProperties"] = None - self.has_legal_hold: Optional[bool] = None - self.has_immutability_policy: Optional[bool] = None - self.immutable_storage_with_versioning = immutable_storage_with_versioning - self.enable_nfs_v3_root_squash = enable_nfs_v3_root_squash - self.enable_nfs_v3_all_squash = enable_nfs_v3_all_squash - - -class ListContainerItems(_serialization.Model): - 
"""Response schema. Contains list of blobs returned, and if paging is requested or required, a URL - to next page of containers. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of blobs containers returned. - :vartype value: list[~azure.mgmt.storage.models.ListContainerItem] - :ivar next_link: Request URL that can be used to query next page of containers. Returned when - total number of requested containers exceed maximum page size. - :vartype next_link: str - """ - - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[ListContainerItem]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.ListContainerItem"]] = None - self.next_link: Optional[str] = None - - -class ListQueue(Resource): - """ListQueue. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar metadata: A name-value pair that represents queue metadata. 
- :vartype metadata: dict[str, str] - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "metadata": {"key": "properties.metadata", "type": "{str}"}, - } - - def __init__(self, *, metadata: Optional[dict[str, str]] = None, **kwargs: Any) -> None: - """ - :keyword metadata: A name-value pair that represents queue metadata. - :paramtype metadata: dict[str, str] - """ - super().__init__(**kwargs) - self.metadata = metadata - - -class ListQueueResource(_serialization.Model): - """Response schema. Contains list of queues returned. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of queues returned. - :vartype value: list[~azure.mgmt.storage.models.ListQueue] - :ivar next_link: Request URL that can be used to list next page of queues. - :vartype next_link: str - """ - - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[ListQueue]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.ListQueue"]] = None - self.next_link: Optional[str] = None - - -class ListQueueServices(_serialization.Model): - """ListQueueServices. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of queue services returned. 
- :vartype value: list[~azure.mgmt.storage.models.QueueServiceProperties] - """ - - _validation = { - "value": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[QueueServiceProperties]"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.QueueServiceProperties"]] = None - - -class ListServiceSasResponse(_serialization.Model): - """The List service SAS credentials operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar service_sas_token: List service SAS credentials of specific resource. - :vartype service_sas_token: str - """ - - _validation = { - "service_sas_token": {"readonly": True}, - } - - _attribute_map = { - "service_sas_token": {"key": "serviceSasToken", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.service_sas_token: Optional[str] = None - - -class ListTableResource(_serialization.Model): - """Response schema. Contains list of tables returned. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of tables returned. - :vartype value: list[~azure.mgmt.storage.models.Table] - :ivar next_link: Request URL that can be used to query next page of tables. - :vartype next_link: str - """ - - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[Table]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.Table"]] = None - self.next_link: Optional[str] = None - - -class ListTableServices(_serialization.Model): - """ListTableServices. - - Variables are only populated by the server, and will be ignored when sending a request. 
- - :ivar value: List of table services returned. - :vartype value: list[~azure.mgmt.storage.models.TableServiceProperties] - """ - - _validation = { - "value": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[TableServiceProperties]"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.TableServiceProperties"]] = None - - -class LocalUser(Resource): - """The local user associated with the storage accounts. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Metadata pertaining to creation and last modification of the resource. - :vartype system_data: ~azure.mgmt.storage.models.SystemData - :ivar permission_scopes: The permission scopes of the local user. - :vartype permission_scopes: list[~azure.mgmt.storage.models.PermissionScope] - :ivar home_directory: Optional, local user home directory. - :vartype home_directory: str - :ivar ssh_authorized_keys: Optional, local user ssh authorized keys for SFTP. - :vartype ssh_authorized_keys: list[~azure.mgmt.storage.models.SshPublicKey] - :ivar sid: A unique Security Identifier that is generated by the server. - :vartype sid: str - :ivar has_shared_key: Indicates whether shared key exists. Set it to false to remove existing - shared key. - :vartype has_shared_key: bool - :ivar has_ssh_key: Indicates whether ssh key exists. Set it to false to remove existing SSH - key. 
- :vartype has_ssh_key: bool - :ivar has_ssh_password: Indicates whether ssh password exists. Set it to false to remove - existing SSH password. - :vartype has_ssh_password: bool - :ivar user_id: A unique Identifier that is generated by the server. - :vartype user_id: int - :ivar group_id: An identifier for associating a group of users. - :vartype group_id: int - :ivar allow_acl_authorization: Indicates whether ACL authorization is allowed for this user. - Set it to false to disallow using ACL authorization. - :vartype allow_acl_authorization: bool - :ivar extended_groups: Supplementary group membership. Only applicable for local users enabled - for NFSv3 access. - :vartype extended_groups: list[int] - :ivar is_nf_sv3_enabled: Indicates if the local user is enabled for access with NFSv3 protocol. - :vartype is_nf_sv3_enabled: bool - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "system_data": {"readonly": True}, - "sid": {"readonly": True}, - "user_id": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, - "permission_scopes": {"key": "properties.permissionScopes", "type": "[PermissionScope]"}, - "home_directory": {"key": "properties.homeDirectory", "type": "str"}, - "ssh_authorized_keys": {"key": "properties.sshAuthorizedKeys", "type": "[SshPublicKey]"}, - "sid": {"key": "properties.sid", "type": "str"}, - "has_shared_key": {"key": "properties.hasSharedKey", "type": "bool"}, - "has_ssh_key": {"key": "properties.hasSshKey", "type": "bool"}, - "has_ssh_password": {"key": "properties.hasSshPassword", "type": "bool"}, - "user_id": {"key": "properties.userId", "type": "int"}, - "group_id": {"key": "properties.groupId", "type": "int"}, - "allow_acl_authorization": {"key": "properties.allowAclAuthorization", "type": "bool"}, - 
"extended_groups": {"key": "properties.extendedGroups", "type": "[int]"}, - "is_nf_sv3_enabled": {"key": "properties.isNFSv3Enabled", "type": "bool"}, - } - - def __init__( - self, - *, - permission_scopes: Optional[list["_models.PermissionScope"]] = None, - home_directory: Optional[str] = None, - ssh_authorized_keys: Optional[list["_models.SshPublicKey"]] = None, - has_shared_key: Optional[bool] = None, - has_ssh_key: Optional[bool] = None, - has_ssh_password: Optional[bool] = None, - group_id: Optional[int] = None, - allow_acl_authorization: Optional[bool] = None, - extended_groups: Optional[list[int]] = None, - is_nf_sv3_enabled: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword permission_scopes: The permission scopes of the local user. - :paramtype permission_scopes: list[~azure.mgmt.storage.models.PermissionScope] - :keyword home_directory: Optional, local user home directory. - :paramtype home_directory: str - :keyword ssh_authorized_keys: Optional, local user ssh authorized keys for SFTP. - :paramtype ssh_authorized_keys: list[~azure.mgmt.storage.models.SshPublicKey] - :keyword has_shared_key: Indicates whether shared key exists. Set it to false to remove - existing shared key. - :paramtype has_shared_key: bool - :keyword has_ssh_key: Indicates whether ssh key exists. Set it to false to remove existing SSH - key. - :paramtype has_ssh_key: bool - :keyword has_ssh_password: Indicates whether ssh password exists. Set it to false to remove - existing SSH password. - :paramtype has_ssh_password: bool - :keyword group_id: An identifier for associating a group of users. - :paramtype group_id: int - :keyword allow_acl_authorization: Indicates whether ACL authorization is allowed for this user. - Set it to false to disallow using ACL authorization. - :paramtype allow_acl_authorization: bool - :keyword extended_groups: Supplementary group membership. Only applicable for local users - enabled for NFSv3 access. 
- :paramtype extended_groups: list[int] - :keyword is_nf_sv3_enabled: Indicates if the local user is enabled for access with NFSv3 - protocol. - :paramtype is_nf_sv3_enabled: bool - """ - super().__init__(**kwargs) - self.system_data: Optional["_models.SystemData"] = None - self.permission_scopes = permission_scopes - self.home_directory = home_directory - self.ssh_authorized_keys = ssh_authorized_keys - self.sid: Optional[str] = None - self.has_shared_key = has_shared_key - self.has_ssh_key = has_ssh_key - self.has_ssh_password = has_ssh_password - self.user_id: Optional[int] = None - self.group_id = group_id - self.allow_acl_authorization = allow_acl_authorization - self.extended_groups = extended_groups - self.is_nf_sv3_enabled = is_nf_sv3_enabled - - -class LocalUserKeys(_serialization.Model): - """The Storage Account Local User keys. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar ssh_authorized_keys: Optional, local user ssh authorized keys for SFTP. - :vartype ssh_authorized_keys: list[~azure.mgmt.storage.models.SshPublicKey] - :ivar shared_key: Auto generated by the server for SMB authentication. - :vartype shared_key: str - """ - - _validation = { - "shared_key": {"readonly": True}, - } - - _attribute_map = { - "ssh_authorized_keys": {"key": "sshAuthorizedKeys", "type": "[SshPublicKey]"}, - "shared_key": {"key": "sharedKey", "type": "str"}, - } - - def __init__(self, *, ssh_authorized_keys: Optional[list["_models.SshPublicKey"]] = None, **kwargs: Any) -> None: - """ - :keyword ssh_authorized_keys: Optional, local user ssh authorized keys for SFTP. - :paramtype ssh_authorized_keys: list[~azure.mgmt.storage.models.SshPublicKey] - """ - super().__init__(**kwargs) - self.ssh_authorized_keys = ssh_authorized_keys - self.shared_key: Optional[str] = None - - -class LocalUserRegeneratePasswordResult(_serialization.Model): - """The secrets of Storage Account Local User. 
- - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar ssh_password: Auto generated password by the server for SSH authentication if - hasSshPassword is set to true on the creation of local user. - :vartype ssh_password: str - """ - - _validation = { - "ssh_password": {"readonly": True}, - } - - _attribute_map = { - "ssh_password": {"key": "sshPassword", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.ssh_password: Optional[str] = None - - -class LocalUsers(_serialization.Model): - """List of local users requested, and if paging is required, a URL to the next page of local - users. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of local users associated with the storage account. - :vartype value: list[~azure.mgmt.storage.models.LocalUser] - :ivar next_link: Request URL that can be used to query next page of local users. Returned when - total number of requested local users exceeds the maximum page size. - :vartype next_link: str - """ - - _validation = { - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[LocalUser]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, *, value: Optional[list["_models.LocalUser"]] = None, **kwargs: Any) -> None: - """ - :keyword value: The list of local users associated with the storage account. - :paramtype value: list[~azure.mgmt.storage.models.LocalUser] - """ - super().__init__(**kwargs) - self.value = value - self.next_link: Optional[str] = None - - -class ManagementPolicy(Resource): - """The Get Storage Account ManagementPolicies operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. 
Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar last_modified_time: Returns the date and time the ManagementPolicies was last modified. - :vartype last_modified_time: ~datetime.datetime - :ivar policy: The Storage Account ManagementPolicy, in JSON format. See more details in: - https://learn.microsoft.com/azure/storage/blobs/lifecycle-management-overview. - :vartype policy: ~azure.mgmt.storage.models.ManagementPolicySchema - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "last_modified_time": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "last_modified_time": {"key": "properties.lastModifiedTime", "type": "iso-8601"}, - "policy": {"key": "properties.policy", "type": "ManagementPolicySchema"}, - } - - def __init__(self, *, policy: Optional["_models.ManagementPolicySchema"] = None, **kwargs: Any) -> None: - """ - :keyword policy: The Storage Account ManagementPolicy, in JSON format. See more details in: - https://learn.microsoft.com/azure/storage/blobs/lifecycle-management-overview. - :paramtype policy: ~azure.mgmt.storage.models.ManagementPolicySchema - """ - super().__init__(**kwargs) - self.last_modified_time: Optional[datetime.datetime] = None - self.policy = policy - - -class ManagementPolicyAction(_serialization.Model): - """Actions are applied to the filtered blobs when the execution condition is met. - - :ivar base_blob: The management policy action for base blob. 
- :vartype base_blob: ~azure.mgmt.storage.models.ManagementPolicyBaseBlob - :ivar snapshot: The management policy action for snapshot. - :vartype snapshot: ~azure.mgmt.storage.models.ManagementPolicySnapShot - :ivar version: The management policy action for version. - :vartype version: ~azure.mgmt.storage.models.ManagementPolicyVersion - """ - - _attribute_map = { - "base_blob": {"key": "baseBlob", "type": "ManagementPolicyBaseBlob"}, - "snapshot": {"key": "snapshot", "type": "ManagementPolicySnapShot"}, - "version": {"key": "version", "type": "ManagementPolicyVersion"}, - } - - def __init__( - self, - *, - base_blob: Optional["_models.ManagementPolicyBaseBlob"] = None, - snapshot: Optional["_models.ManagementPolicySnapShot"] = None, - version: Optional["_models.ManagementPolicyVersion"] = None, - **kwargs: Any - ) -> None: - """ - :keyword base_blob: The management policy action for base blob. - :paramtype base_blob: ~azure.mgmt.storage.models.ManagementPolicyBaseBlob - :keyword snapshot: The management policy action for snapshot. - :paramtype snapshot: ~azure.mgmt.storage.models.ManagementPolicySnapShot - :keyword version: The management policy action for version. - :paramtype version: ~azure.mgmt.storage.models.ManagementPolicyVersion - """ - super().__init__(**kwargs) - self.base_blob = base_blob - self.snapshot = snapshot - self.version = version - - -class ManagementPolicyBaseBlob(_serialization.Model): - """Management policy action for base blob. - - :ivar tier_to_cool: The function to tier blobs to cool storage. - :vartype tier_to_cool: ~azure.mgmt.storage.models.DateAfterModification - :ivar tier_to_archive: The function to tier blobs to archive storage. - :vartype tier_to_archive: ~azure.mgmt.storage.models.DateAfterModification - :ivar tier_to_cold: The function to tier blobs to cold storage. - :vartype tier_to_cold: ~azure.mgmt.storage.models.DateAfterModification - :ivar tier_to_hot: The function to tier blobs to hot storage. 
This action can only be used with - Premium Block Blob Storage Accounts. - :vartype tier_to_hot: ~azure.mgmt.storage.models.DateAfterModification - :ivar delete: The function to delete the blob. - :vartype delete: ~azure.mgmt.storage.models.DateAfterModification - :ivar enable_auto_tier_to_hot_from_cool: This property enables auto tiering of a blob from cool - to hot on a blob access. This property requires tierToCool.daysAfterLastAccessTimeGreaterThan. - :vartype enable_auto_tier_to_hot_from_cool: bool - """ - - _attribute_map = { - "tier_to_cool": {"key": "tierToCool", "type": "DateAfterModification"}, - "tier_to_archive": {"key": "tierToArchive", "type": "DateAfterModification"}, - "tier_to_cold": {"key": "tierToCold", "type": "DateAfterModification"}, - "tier_to_hot": {"key": "tierToHot", "type": "DateAfterModification"}, - "delete": {"key": "delete", "type": "DateAfterModification"}, - "enable_auto_tier_to_hot_from_cool": {"key": "enableAutoTierToHotFromCool", "type": "bool"}, - } - - def __init__( - self, - *, - tier_to_cool: Optional["_models.DateAfterModification"] = None, - tier_to_archive: Optional["_models.DateAfterModification"] = None, - tier_to_cold: Optional["_models.DateAfterModification"] = None, - tier_to_hot: Optional["_models.DateAfterModification"] = None, - delete: Optional["_models.DateAfterModification"] = None, - enable_auto_tier_to_hot_from_cool: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword tier_to_cool: The function to tier blobs to cool storage. - :paramtype tier_to_cool: ~azure.mgmt.storage.models.DateAfterModification - :keyword tier_to_archive: The function to tier blobs to archive storage. - :paramtype tier_to_archive: ~azure.mgmt.storage.models.DateAfterModification - :keyword tier_to_cold: The function to tier blobs to cold storage. - :paramtype tier_to_cold: ~azure.mgmt.storage.models.DateAfterModification - :keyword tier_to_hot: The function to tier blobs to hot storage. 
This action can only be used - with Premium Block Blob Storage Accounts. - :paramtype tier_to_hot: ~azure.mgmt.storage.models.DateAfterModification - :keyword delete: The function to delete the blob. - :paramtype delete: ~azure.mgmt.storage.models.DateAfterModification - :keyword enable_auto_tier_to_hot_from_cool: This property enables auto tiering of a blob from - cool to hot on a blob access. This property requires - tierToCool.daysAfterLastAccessTimeGreaterThan. - :paramtype enable_auto_tier_to_hot_from_cool: bool - """ - super().__init__(**kwargs) - self.tier_to_cool = tier_to_cool - self.tier_to_archive = tier_to_archive - self.tier_to_cold = tier_to_cold - self.tier_to_hot = tier_to_hot - self.delete = delete - self.enable_auto_tier_to_hot_from_cool = enable_auto_tier_to_hot_from_cool - - -class ManagementPolicyDefinition(_serialization.Model): - """An object that defines the Lifecycle rule. Each definition is made up with a filters set and an - actions set. - - All required parameters must be populated in order to send to server. - - :ivar actions: An object that defines the action set. Required. - :vartype actions: ~azure.mgmt.storage.models.ManagementPolicyAction - :ivar filters: An object that defines the filter set. - :vartype filters: ~azure.mgmt.storage.models.ManagementPolicyFilter - """ - - _validation = { - "actions": {"required": True}, - } - - _attribute_map = { - "actions": {"key": "actions", "type": "ManagementPolicyAction"}, - "filters": {"key": "filters", "type": "ManagementPolicyFilter"}, - } - - def __init__( - self, - *, - actions: "_models.ManagementPolicyAction", - filters: Optional["_models.ManagementPolicyFilter"] = None, - **kwargs: Any - ) -> None: - """ - :keyword actions: An object that defines the action set. Required. - :paramtype actions: ~azure.mgmt.storage.models.ManagementPolicyAction - :keyword filters: An object that defines the filter set. 
- :paramtype filters: ~azure.mgmt.storage.models.ManagementPolicyFilter - """ - super().__init__(**kwargs) - self.actions = actions - self.filters = filters - - -class ManagementPolicyFilter(_serialization.Model): - """Filters limit rule actions to a subset of blobs within the storage account. If multiple filters - are defined, a logical AND is performed on all filters. - - All required parameters must be populated in order to send to server. - - :ivar prefix_match: An array of strings for prefixes to be match. - :vartype prefix_match: list[str] - :ivar blob_types: An array of predefined enum values. Currently blockBlob supports all tiering - and delete actions. Only delete actions are supported for appendBlob. Required. - :vartype blob_types: list[str] - :ivar blob_index_match: An array of blob index tag based filters, there can be at most 10 tag - filters. - :vartype blob_index_match: list[~azure.mgmt.storage.models.TagFilter] - """ - - _validation = { - "blob_types": {"required": True}, - } - - _attribute_map = { - "prefix_match": {"key": "prefixMatch", "type": "[str]"}, - "blob_types": {"key": "blobTypes", "type": "[str]"}, - "blob_index_match": {"key": "blobIndexMatch", "type": "[TagFilter]"}, - } - - def __init__( - self, - *, - blob_types: list[str], - prefix_match: Optional[list[str]] = None, - blob_index_match: Optional[list["_models.TagFilter"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword prefix_match: An array of strings for prefixes to be match. - :paramtype prefix_match: list[str] - :keyword blob_types: An array of predefined enum values. Currently blockBlob supports all - tiering and delete actions. Only delete actions are supported for appendBlob. Required. - :paramtype blob_types: list[str] - :keyword blob_index_match: An array of blob index tag based filters, there can be at most 10 - tag filters. 
- :paramtype blob_index_match: list[~azure.mgmt.storage.models.TagFilter] - """ - super().__init__(**kwargs) - self.prefix_match = prefix_match - self.blob_types = blob_types - self.blob_index_match = blob_index_match - - -class ManagementPolicyRule(_serialization.Model): - """An object that wraps the Lifecycle rule. Each rule is uniquely defined by name. - - All required parameters must be populated in order to send to server. - - :ivar enabled: Rule is enabled if set to true. - :vartype enabled: bool - :ivar name: A rule name can contain any combination of alpha numeric characters. Rule name is - case-sensitive. It must be unique within a policy. Required. - :vartype name: str - :ivar type: The valid value is Lifecycle. Required. "Lifecycle" - :vartype type: str or ~azure.mgmt.storage.models.RuleType - :ivar definition: An object that defines the Lifecycle rule. Required. - :vartype definition: ~azure.mgmt.storage.models.ManagementPolicyDefinition - """ - - _validation = { - "name": {"required": True}, - "type": {"required": True}, - "definition": {"required": True}, - } - - _attribute_map = { - "enabled": {"key": "enabled", "type": "bool"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "definition": {"key": "definition", "type": "ManagementPolicyDefinition"}, - } - - def __init__( - self, - *, - name: str, - type: Union[str, "_models.RuleType"], - definition: "_models.ManagementPolicyDefinition", - enabled: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword enabled: Rule is enabled if set to true. - :paramtype enabled: bool - :keyword name: A rule name can contain any combination of alpha numeric characters. Rule name - is case-sensitive. It must be unique within a policy. Required. - :paramtype name: str - :keyword type: The valid value is Lifecycle. Required. "Lifecycle" - :paramtype type: str or ~azure.mgmt.storage.models.RuleType - :keyword definition: An object that defines the Lifecycle rule. 
Required. - :paramtype definition: ~azure.mgmt.storage.models.ManagementPolicyDefinition - """ - super().__init__(**kwargs) - self.enabled = enabled - self.name = name - self.type = type - self.definition = definition - - -class ManagementPolicySchema(_serialization.Model): - """The Storage Account ManagementPolicies Rules. See more details in: - https://learn.microsoft.com/azure/storage/blobs/lifecycle-management-overview. - - All required parameters must be populated in order to send to server. - - :ivar rules: The Storage Account ManagementPolicies Rules. See more details in: - https://learn.microsoft.com/azure/storage/blobs/lifecycle-management-overview. Required. - :vartype rules: list[~azure.mgmt.storage.models.ManagementPolicyRule] - """ - - _validation = { - "rules": {"required": True}, - } - - _attribute_map = { - "rules": {"key": "rules", "type": "[ManagementPolicyRule]"}, - } - - def __init__(self, *, rules: list["_models.ManagementPolicyRule"], **kwargs: Any) -> None: - """ - :keyword rules: The Storage Account ManagementPolicies Rules. See more details in: - https://learn.microsoft.com/azure/storage/blobs/lifecycle-management-overview. Required. - :paramtype rules: list[~azure.mgmt.storage.models.ManagementPolicyRule] - """ - super().__init__(**kwargs) - self.rules = rules - - -class ManagementPolicySnapShot(_serialization.Model): - """Management policy action for snapshot. - - :ivar tier_to_cool: The function to tier blob snapshot to cool storage. - :vartype tier_to_cool: ~azure.mgmt.storage.models.DateAfterCreation - :ivar tier_to_archive: The function to tier blob snapshot to archive storage. - :vartype tier_to_archive: ~azure.mgmt.storage.models.DateAfterCreation - :ivar tier_to_cold: The function to tier blobs to cold storage. - :vartype tier_to_cold: ~azure.mgmt.storage.models.DateAfterCreation - :ivar tier_to_hot: The function to tier blobs to hot storage. This action can only be used with - Premium Block Blob Storage Accounts. 
- :vartype tier_to_hot: ~azure.mgmt.storage.models.DateAfterCreation - :ivar delete: The function to delete the blob snapshot. - :vartype delete: ~azure.mgmt.storage.models.DateAfterCreation - """ - - _attribute_map = { - "tier_to_cool": {"key": "tierToCool", "type": "DateAfterCreation"}, - "tier_to_archive": {"key": "tierToArchive", "type": "DateAfterCreation"}, - "tier_to_cold": {"key": "tierToCold", "type": "DateAfterCreation"}, - "tier_to_hot": {"key": "tierToHot", "type": "DateAfterCreation"}, - "delete": {"key": "delete", "type": "DateAfterCreation"}, - } - - def __init__( - self, - *, - tier_to_cool: Optional["_models.DateAfterCreation"] = None, - tier_to_archive: Optional["_models.DateAfterCreation"] = None, - tier_to_cold: Optional["_models.DateAfterCreation"] = None, - tier_to_hot: Optional["_models.DateAfterCreation"] = None, - delete: Optional["_models.DateAfterCreation"] = None, - **kwargs: Any - ) -> None: - """ - :keyword tier_to_cool: The function to tier blob snapshot to cool storage. - :paramtype tier_to_cool: ~azure.mgmt.storage.models.DateAfterCreation - :keyword tier_to_archive: The function to tier blob snapshot to archive storage. - :paramtype tier_to_archive: ~azure.mgmt.storage.models.DateAfterCreation - :keyword tier_to_cold: The function to tier blobs to cold storage. - :paramtype tier_to_cold: ~azure.mgmt.storage.models.DateAfterCreation - :keyword tier_to_hot: The function to tier blobs to hot storage. This action can only be used - with Premium Block Blob Storage Accounts. - :paramtype tier_to_hot: ~azure.mgmt.storage.models.DateAfterCreation - :keyword delete: The function to delete the blob snapshot. 
- :paramtype delete: ~azure.mgmt.storage.models.DateAfterCreation - """ - super().__init__(**kwargs) - self.tier_to_cool = tier_to_cool - self.tier_to_archive = tier_to_archive - self.tier_to_cold = tier_to_cold - self.tier_to_hot = tier_to_hot - self.delete = delete - - -class ManagementPolicyVersion(_serialization.Model): - """Management policy action for blob version. - - :ivar tier_to_cool: The function to tier blob version to cool storage. - :vartype tier_to_cool: ~azure.mgmt.storage.models.DateAfterCreation - :ivar tier_to_archive: The function to tier blob version to archive storage. - :vartype tier_to_archive: ~azure.mgmt.storage.models.DateAfterCreation - :ivar tier_to_cold: The function to tier blobs to cold storage. - :vartype tier_to_cold: ~azure.mgmt.storage.models.DateAfterCreation - :ivar tier_to_hot: The function to tier blobs to hot storage. This action can only be used with - Premium Block Blob Storage Accounts. - :vartype tier_to_hot: ~azure.mgmt.storage.models.DateAfterCreation - :ivar delete: The function to delete the blob version. - :vartype delete: ~azure.mgmt.storage.models.DateAfterCreation - """ - - _attribute_map = { - "tier_to_cool": {"key": "tierToCool", "type": "DateAfterCreation"}, - "tier_to_archive": {"key": "tierToArchive", "type": "DateAfterCreation"}, - "tier_to_cold": {"key": "tierToCold", "type": "DateAfterCreation"}, - "tier_to_hot": {"key": "tierToHot", "type": "DateAfterCreation"}, - "delete": {"key": "delete", "type": "DateAfterCreation"}, - } - - def __init__( - self, - *, - tier_to_cool: Optional["_models.DateAfterCreation"] = None, - tier_to_archive: Optional["_models.DateAfterCreation"] = None, - tier_to_cold: Optional["_models.DateAfterCreation"] = None, - tier_to_hot: Optional["_models.DateAfterCreation"] = None, - delete: Optional["_models.DateAfterCreation"] = None, - **kwargs: Any - ) -> None: - """ - :keyword tier_to_cool: The function to tier blob version to cool storage. 
- :paramtype tier_to_cool: ~azure.mgmt.storage.models.DateAfterCreation - :keyword tier_to_archive: The function to tier blob version to archive storage. - :paramtype tier_to_archive: ~azure.mgmt.storage.models.DateAfterCreation - :keyword tier_to_cold: The function to tier blobs to cold storage. - :paramtype tier_to_cold: ~azure.mgmt.storage.models.DateAfterCreation - :keyword tier_to_hot: The function to tier blobs to hot storage. This action can only be used - with Premium Block Blob Storage Accounts. - :paramtype tier_to_hot: ~azure.mgmt.storage.models.DateAfterCreation - :keyword delete: The function to delete the blob version. - :paramtype delete: ~azure.mgmt.storage.models.DateAfterCreation - """ - super().__init__(**kwargs) - self.tier_to_cool = tier_to_cool - self.tier_to_archive = tier_to_archive - self.tier_to_cold = tier_to_cold - self.tier_to_hot = tier_to_hot - self.delete = delete - - -class MetricSpecification(_serialization.Model): - """Metric specification of operation. - - :ivar name: Name of metric specification. - :vartype name: str - :ivar display_name: Display name of metric specification. - :vartype display_name: str - :ivar display_description: Display description of metric specification. - :vartype display_description: str - :ivar unit: Unit could be Bytes or Count. - :vartype unit: str - :ivar dimensions: Dimensions of blobs, including blob type and access tier. - :vartype dimensions: list[~azure.mgmt.storage.models.Dimension] - :ivar aggregation_type: Aggregation type could be Average. - :vartype aggregation_type: str - :ivar fill_gap_with_zero: The property to decide fill gap with zero or not. - :vartype fill_gap_with_zero: bool - :ivar category: The category this metric specification belong to, could be Capacity. - :vartype category: str - :ivar resource_id_dimension_name_override: Account Resource Id. 
- :vartype resource_id_dimension_name_override: str - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "display_name": {"key": "displayName", "type": "str"}, - "display_description": {"key": "displayDescription", "type": "str"}, - "unit": {"key": "unit", "type": "str"}, - "dimensions": {"key": "dimensions", "type": "[Dimension]"}, - "aggregation_type": {"key": "aggregationType", "type": "str"}, - "fill_gap_with_zero": {"key": "fillGapWithZero", "type": "bool"}, - "category": {"key": "category", "type": "str"}, - "resource_id_dimension_name_override": {"key": "resourceIdDimensionNameOverride", "type": "str"}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - display_name: Optional[str] = None, - display_description: Optional[str] = None, - unit: Optional[str] = None, - dimensions: Optional[list["_models.Dimension"]] = None, - aggregation_type: Optional[str] = None, - fill_gap_with_zero: Optional[bool] = None, - category: Optional[str] = None, - resource_id_dimension_name_override: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: Name of metric specification. - :paramtype name: str - :keyword display_name: Display name of metric specification. - :paramtype display_name: str - :keyword display_description: Display description of metric specification. - :paramtype display_description: str - :keyword unit: Unit could be Bytes or Count. - :paramtype unit: str - :keyword dimensions: Dimensions of blobs, including blob type and access tier. - :paramtype dimensions: list[~azure.mgmt.storage.models.Dimension] - :keyword aggregation_type: Aggregation type could be Average. - :paramtype aggregation_type: str - :keyword fill_gap_with_zero: The property to decide fill gap with zero or not. - :paramtype fill_gap_with_zero: bool - :keyword category: The category this metric specification belong to, could be Capacity. - :paramtype category: str - :keyword resource_id_dimension_name_override: Account Resource Id. 
- :paramtype resource_id_dimension_name_override: str - """ - super().__init__(**kwargs) - self.name = name - self.display_name = display_name - self.display_description = display_description - self.unit = unit - self.dimensions = dimensions - self.aggregation_type = aggregation_type - self.fill_gap_with_zero = fill_gap_with_zero - self.category = category - self.resource_id_dimension_name_override = resource_id_dimension_name_override - - -class Multichannel(_serialization.Model): - """Multichannel setting. Applies to Premium FileStorage only. - - :ivar enabled: Indicates whether multichannel is enabled. - :vartype enabled: bool - """ - - _attribute_map = { - "enabled": {"key": "enabled", "type": "bool"}, - } - - def __init__(self, *, enabled: Optional[bool] = None, **kwargs: Any) -> None: - """ - :keyword enabled: Indicates whether multichannel is enabled. - :paramtype enabled: bool - """ - super().__init__(**kwargs) - self.enabled = enabled - - -class NetworkRuleSet(_serialization.Model): - """Network rule set. - - All required parameters must be populated in order to send to server. - - :ivar bypass: Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. Possible - values are any combination of Logging|Metrics|AzureServices (For example, "Logging, Metrics"), - or None to bypass none of those traffics. Known values are: "None", "Logging", "Metrics", and - "AzureServices". - :vartype bypass: str or ~azure.mgmt.storage.models.Bypass - :ivar resource_access_rules: Sets the resource access rules. - :vartype resource_access_rules: list[~azure.mgmt.storage.models.ResourceAccessRule] - :ivar virtual_network_rules: Sets the virtual network rules. - :vartype virtual_network_rules: list[~azure.mgmt.storage.models.VirtualNetworkRule] - :ivar ip_rules: Sets the IP ACL rules. - :vartype ip_rules: list[~azure.mgmt.storage.models.IPRule] - :ivar ipv6_rules: Sets the IPv6 ACL rules. 
- :vartype ipv6_rules: list[~azure.mgmt.storage.models.IPRule] - :ivar default_action: Specifies the default action of allow or deny when no other rules match. - Known values are: "Allow" and "Deny". - :vartype default_action: str or ~azure.mgmt.storage.models.DefaultAction - """ - - _validation = { - "default_action": {"required": True}, - } - - _attribute_map = { - "bypass": {"key": "bypass", "type": "str"}, - "resource_access_rules": {"key": "resourceAccessRules", "type": "[ResourceAccessRule]"}, - "virtual_network_rules": {"key": "virtualNetworkRules", "type": "[VirtualNetworkRule]"}, - "ip_rules": {"key": "ipRules", "type": "[IPRule]"}, - "ipv6_rules": {"key": "ipv6Rules", "type": "[IPRule]"}, - "default_action": {"key": "defaultAction", "type": "str"}, - } - - def __init__( - self, - *, - default_action: Union[str, "_models.DefaultAction"] = "Allow", - bypass: Union[str, "_models.Bypass"] = "AzureServices", - resource_access_rules: Optional[list["_models.ResourceAccessRule"]] = None, - virtual_network_rules: Optional[list["_models.VirtualNetworkRule"]] = None, - ip_rules: Optional[list["_models.IPRule"]] = None, - ipv6_rules: Optional[list["_models.IPRule"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword bypass: Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. - Possible values are any combination of Logging|Metrics|AzureServices (For example, "Logging, - Metrics"), or None to bypass none of those traffics. Known values are: "None", "Logging", - "Metrics", and "AzureServices". - :paramtype bypass: str or ~azure.mgmt.storage.models.Bypass - :keyword resource_access_rules: Sets the resource access rules. - :paramtype resource_access_rules: list[~azure.mgmt.storage.models.ResourceAccessRule] - :keyword virtual_network_rules: Sets the virtual network rules. - :paramtype virtual_network_rules: list[~azure.mgmt.storage.models.VirtualNetworkRule] - :keyword ip_rules: Sets the IP ACL rules. 
- :paramtype ip_rules: list[~azure.mgmt.storage.models.IPRule] - :keyword ipv6_rules: Sets the IPv6 ACL rules. - :paramtype ipv6_rules: list[~azure.mgmt.storage.models.IPRule] - :keyword default_action: Specifies the default action of allow or deny when no other rules - match. Known values are: "Allow" and "Deny". - :paramtype default_action: str or ~azure.mgmt.storage.models.DefaultAction - """ - super().__init__(**kwargs) - self.bypass = bypass - self.resource_access_rules = resource_access_rules - self.virtual_network_rules = virtual_network_rules - self.ip_rules = ip_rules - self.ipv6_rules = ipv6_rules - self.default_action = default_action - - -class NetworkSecurityPerimeter(_serialization.Model): - """NetworkSecurityPerimeter related information. - - :ivar id: The ARM identifier of the resource. - :vartype id: str - :ivar perimeter_guid: Guid of the resource. - :vartype perimeter_guid: str - :ivar location: Location of the resource. - :vartype location: str - """ - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "perimeter_guid": {"key": "perimeterGuid", "type": "str"}, - "location": {"key": "location", "type": "str"}, - } - - def __init__( - self, - *, - id: Optional[str] = None, # pylint: disable=redefined-builtin - perimeter_guid: Optional[str] = None, - location: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword id: The ARM identifier of the resource. - :paramtype id: str - :keyword perimeter_guid: Guid of the resource. - :paramtype perimeter_guid: str - :keyword location: Location of the resource. - :paramtype location: str - """ - super().__init__(**kwargs) - self.id = id - self.perimeter_guid = perimeter_guid - self.location = location - - -class ResourceAutoGenerated(_serialization.Model): - """Common fields that are returned in the response for all Azure Resource Manager resources. - - Variables are only populated by the server, and will be ignored when sending a request. 
- - :ivar id: Fully qualified resource ID for the resource. E.g. - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.storage.models.SystemData - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "system_data": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.id: Optional[str] = None - self.name: Optional[str] = None - self.type: Optional[str] = None - self.system_data: Optional["_models.SystemData"] = None - - -class ProxyResourceAutoGenerated(ResourceAutoGenerated): - """The resource model definition for a Azure Resource Manager proxy resource. It will not have - tags and a location. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. E.g. - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". 
- :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.storage.models.SystemData - """ - - -class NetworkSecurityPerimeterConfiguration(ProxyResourceAutoGenerated): - """The Network Security Perimeter configuration resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. E.g. - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.storage.models.SystemData - :ivar provisioning_state: Provisioning state of Network Security Perimeter configuration - propagation. Known values are: "Accepted", "Succeeded", "Failed", "Deleting", and "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationProvisioningState - :ivar provisioning_issues: List of Provisioning Issues if any. - :vartype provisioning_issues: list[~azure.mgmt.storage.models.ProvisioningIssue] - :ivar network_security_perimeter: NetworkSecurityPerimeter related information. - :vartype network_security_perimeter: ~azure.mgmt.storage.models.NetworkSecurityPerimeter - :ivar resource_association: Information about resource association. - :vartype resource_association: - ~azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationPropertiesResourceAssociation - :ivar profile: Network Security Perimeter profile. 
- :vartype profile: - ~azure.mgmt.storage.models.NetworkSecurityPerimeterConfigurationPropertiesProfile - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "system_data": {"readonly": True}, - "provisioning_state": {"readonly": True}, - "provisioning_issues": {"readonly": True}, - "network_security_perimeter": {"readonly": True}, - "resource_association": {"readonly": True}, - "profile": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, - "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, - "provisioning_issues": {"key": "properties.provisioningIssues", "type": "[ProvisioningIssue]"}, - "network_security_perimeter": { - "key": "properties.networkSecurityPerimeter", - "type": "NetworkSecurityPerimeter", - }, - "resource_association": { - "key": "properties.resourceAssociation", - "type": "NetworkSecurityPerimeterConfigurationPropertiesResourceAssociation", - }, - "profile": {"key": "properties.profile", "type": "NetworkSecurityPerimeterConfigurationPropertiesProfile"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.provisioning_state: Optional[ - Union[str, "_models.NetworkSecurityPerimeterConfigurationProvisioningState"] - ] = None - self.provisioning_issues: Optional[list["_models.ProvisioningIssue"]] = None - self.network_security_perimeter: Optional["_models.NetworkSecurityPerimeter"] = None - self.resource_association: Optional[ - "_models.NetworkSecurityPerimeterConfigurationPropertiesResourceAssociation" - ] = None - self.profile: Optional["_models.NetworkSecurityPerimeterConfigurationPropertiesProfile"] = None - - -class NetworkSecurityPerimeterConfigurationList(_serialization.Model): # pylint: disable=name-too-long - """Result of the List 
Network Security Perimeter configuration operation. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: A collection of Network Security Perimeter configurations. - :vartype value: list[~azure.mgmt.storage.models.NetworkSecurityPerimeterConfiguration] - :ivar next_link: The URI that can be used to request the next set of paged results. - :vartype next_link: str - """ - - _validation = { - "value": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[NetworkSecurityPerimeterConfiguration]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, *, next_link: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword next_link: The URI that can be used to request the next set of paged results. - :paramtype next_link: str - """ - super().__init__(**kwargs) - self.value: Optional[list["_models.NetworkSecurityPerimeterConfiguration"]] = None - self.next_link = next_link - - -class NetworkSecurityPerimeterConfigurationPropertiesProfile(_serialization.Model): # pylint: disable=name-too-long - """Network Security Perimeter profile. - - :ivar name: Name of the resource. - :vartype name: str - :ivar access_rules_version: Current access rules version. - :vartype access_rules_version: float - :ivar access_rules: List of Access Rules. - :vartype access_rules: list[~azure.mgmt.storage.models.NspAccessRule] - :ivar diagnostic_settings_version: Diagnostic settings version. - :vartype diagnostic_settings_version: float - :ivar enabled_log_categories: Enabled logging categories. 
- :vartype enabled_log_categories: list[str] - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "access_rules_version": {"key": "accessRulesVersion", "type": "float"}, - "access_rules": {"key": "accessRules", "type": "[NspAccessRule]"}, - "diagnostic_settings_version": {"key": "diagnosticSettingsVersion", "type": "float"}, - "enabled_log_categories": {"key": "enabledLogCategories", "type": "[str]"}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - access_rules_version: Optional[float] = None, - access_rules: Optional[list["_models.NspAccessRule"]] = None, - diagnostic_settings_version: Optional[float] = None, - enabled_log_categories: Optional[list[str]] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: Name of the resource. - :paramtype name: str - :keyword access_rules_version: Current access rules version. - :paramtype access_rules_version: float - :keyword access_rules: List of Access Rules. - :paramtype access_rules: list[~azure.mgmt.storage.models.NspAccessRule] - :keyword diagnostic_settings_version: Diagnostic settings version. - :paramtype diagnostic_settings_version: float - :keyword enabled_log_categories: Enabled logging categories. - :paramtype enabled_log_categories: list[str] - """ - super().__init__(**kwargs) - self.name = name - self.access_rules_version = access_rules_version - self.access_rules = access_rules - self.diagnostic_settings_version = diagnostic_settings_version - self.enabled_log_categories = enabled_log_categories - - -class NetworkSecurityPerimeterConfigurationPropertiesResourceAssociation( - _serialization.Model -): # pylint: disable=name-too-long - """Information about resource association. - - :ivar name: Name of the resource association. - :vartype name: str - :ivar access_mode: Access Mode of the resource association. Known values are: "Enforced", - "Learning", and "Audit". 
- :vartype access_mode: str or ~azure.mgmt.storage.models.ResourceAssociationAccessMode - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "access_mode": {"key": "accessMode", "type": "str"}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - access_mode: Optional[Union[str, "_models.ResourceAssociationAccessMode"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: Name of the resource association. - :paramtype name: str - :keyword access_mode: Access Mode of the resource association. Known values are: "Enforced", - "Learning", and "Audit". - :paramtype access_mode: str or ~azure.mgmt.storage.models.ResourceAssociationAccessMode - """ - super().__init__(**kwargs) - self.name = name - self.access_mode = access_mode - - -class NfsSetting(_serialization.Model): - """Setting for NFS protocol. - - :ivar encryption_in_transit: Encryption in transit setting. - :vartype encryption_in_transit: ~azure.mgmt.storage.models.EncryptionInTransit - """ - - _attribute_map = { - "encryption_in_transit": {"key": "encryptionInTransit", "type": "EncryptionInTransit"}, - } - - def __init__(self, *, encryption_in_transit: Optional["_models.EncryptionInTransit"] = None, **kwargs: Any) -> None: - """ - :keyword encryption_in_transit: Encryption in transit setting. - :paramtype encryption_in_transit: ~azure.mgmt.storage.models.EncryptionInTransit - """ - super().__init__(**kwargs) - self.encryption_in_transit = encryption_in_transit - - -class NspAccessRule(_serialization.Model): - """Information of Access Rule in Network Security Perimeter profile. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: Name of the resource. - :vartype name: str - :ivar properties: Properties of Access Rule. 
- :vartype properties: ~azure.mgmt.storage.models.NspAccessRuleProperties - """ - - _validation = { - "properties": {"readonly": True}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "properties": {"key": "properties", "type": "NspAccessRuleProperties"}, - } - - def __init__(self, *, name: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword name: Name of the resource. - :paramtype name: str - """ - super().__init__(**kwargs) - self.name = name - self.properties: Optional["_models.NspAccessRuleProperties"] = None - - -class NspAccessRuleProperties(_serialization.Model): - """Properties of Access Rule. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar direction: Direction of Access Rule. Known values are: "Inbound" and "Outbound". - :vartype direction: str or ~azure.mgmt.storage.models.NspAccessRuleDirection - :ivar address_prefixes: Address prefixes in the CIDR format for inbound rules. - :vartype address_prefixes: list[str] - :ivar subscriptions: Subscriptions for inbound rules. - :vartype subscriptions: - list[~azure.mgmt.storage.models.NspAccessRulePropertiesSubscriptionsItem] - :ivar network_security_perimeters: NetworkSecurityPerimeters for inbound rules. - :vartype network_security_perimeters: list[~azure.mgmt.storage.models.NetworkSecurityPerimeter] - :ivar fully_qualified_domain_names: FQDN for outbound rules. 
- :vartype fully_qualified_domain_names: list[str] - """ - - _validation = { - "network_security_perimeters": {"readonly": True}, - "fully_qualified_domain_names": {"readonly": True}, - } - - _attribute_map = { - "direction": {"key": "direction", "type": "str"}, - "address_prefixes": {"key": "addressPrefixes", "type": "[str]"}, - "subscriptions": {"key": "subscriptions", "type": "[NspAccessRulePropertiesSubscriptionsItem]"}, - "network_security_perimeters": {"key": "networkSecurityPerimeters", "type": "[NetworkSecurityPerimeter]"}, - "fully_qualified_domain_names": {"key": "fullyQualifiedDomainNames", "type": "[str]"}, - } - - def __init__( - self, - *, - direction: Optional[Union[str, "_models.NspAccessRuleDirection"]] = None, - address_prefixes: Optional[list[str]] = None, - subscriptions: Optional[list["_models.NspAccessRulePropertiesSubscriptionsItem"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword direction: Direction of Access Rule. Known values are: "Inbound" and "Outbound". - :paramtype direction: str or ~azure.mgmt.storage.models.NspAccessRuleDirection - :keyword address_prefixes: Address prefixes in the CIDR format for inbound rules. - :paramtype address_prefixes: list[str] - :keyword subscriptions: Subscriptions for inbound rules. - :paramtype subscriptions: - list[~azure.mgmt.storage.models.NspAccessRulePropertiesSubscriptionsItem] - """ - super().__init__(**kwargs) - self.direction = direction - self.address_prefixes = address_prefixes - self.subscriptions = subscriptions - self.network_security_perimeters: Optional[list["_models.NetworkSecurityPerimeter"]] = None - self.fully_qualified_domain_names: Optional[list[str]] = None - - -class NspAccessRulePropertiesSubscriptionsItem(_serialization.Model): - """Subscription for inbound rule. - - :ivar id: The ARM identifier of subscription. 
- :vartype id: str - """ - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - } - - def __init__(self, *, id: Optional[str] = None, **kwargs: Any) -> None: # pylint: disable=redefined-builtin - """ - :keyword id: The ARM identifier of subscription. - :paramtype id: str - """ - super().__init__(**kwargs) - self.id = id - - -class ObjectReplicationPolicies(_serialization.Model): - """List storage account object replication policies. - - :ivar value: The replication policy between two storage accounts. - :vartype value: list[~azure.mgmt.storage.models.ObjectReplicationPolicy] - """ - - _attribute_map = { - "value": {"key": "value", "type": "[ObjectReplicationPolicy]"}, - } - - def __init__(self, *, value: Optional[list["_models.ObjectReplicationPolicy"]] = None, **kwargs: Any) -> None: - """ - :keyword value: The replication policy between two storage accounts. - :paramtype value: list[~azure.mgmt.storage.models.ObjectReplicationPolicy] - """ - super().__init__(**kwargs) - self.value = value - - -class ObjectReplicationPolicy(Resource): - """The replication policy between two storage accounts. Multiple rules can be defined in one - policy. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar policy_id: A unique id for object replication policy. - :vartype policy_id: str - :ivar enabled_time: Indicates when the policy is enabled on the source account. - :vartype enabled_time: ~datetime.datetime - :ivar source_account: Required. Source account name. 
It should be full resource id if - allowCrossTenantReplication set to false. - :vartype source_account: str - :ivar destination_account: Required. Destination account name. It should be full resource id if - allowCrossTenantReplication set to false. - :vartype destination_account: str - :ivar rules: The storage account object replication rules. - :vartype rules: list[~azure.mgmt.storage.models.ObjectReplicationPolicyRule] - :ivar metrics: Optional. The object replication policy metrics feature options. - :vartype metrics: ~azure.mgmt.storage.models.ObjectReplicationPolicyPropertiesMetrics - :ivar priority_replication: Optional. The object replication policy priority replication - feature options. - :vartype priority_replication: - ~azure.mgmt.storage.models.ObjectReplicationPolicyPropertiesPriorityReplication - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "policy_id": {"readonly": True}, - "enabled_time": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "policy_id": {"key": "properties.policyId", "type": "str"}, - "enabled_time": {"key": "properties.enabledTime", "type": "iso-8601"}, - "source_account": {"key": "properties.sourceAccount", "type": "str"}, - "destination_account": {"key": "properties.destinationAccount", "type": "str"}, - "rules": {"key": "properties.rules", "type": "[ObjectReplicationPolicyRule]"}, - "metrics": {"key": "properties.metrics", "type": "ObjectReplicationPolicyPropertiesMetrics"}, - "priority_replication": { - "key": "properties.priorityReplication", - "type": "ObjectReplicationPolicyPropertiesPriorityReplication", - }, - } - - def __init__( - self, - *, - source_account: Optional[str] = None, - destination_account: Optional[str] = None, - rules: Optional[list["_models.ObjectReplicationPolicyRule"]] = None, - metrics: 
Optional["_models.ObjectReplicationPolicyPropertiesMetrics"] = None, - priority_replication: Optional["_models.ObjectReplicationPolicyPropertiesPriorityReplication"] = None, - **kwargs: Any - ) -> None: - """ - :keyword source_account: Required. Source account name. It should be full resource id if - allowCrossTenantReplication set to false. - :paramtype source_account: str - :keyword destination_account: Required. Destination account name. It should be full resource id - if allowCrossTenantReplication set to false. - :paramtype destination_account: str - :keyword rules: The storage account object replication rules. - :paramtype rules: list[~azure.mgmt.storage.models.ObjectReplicationPolicyRule] - :keyword metrics: Optional. The object replication policy metrics feature options. - :paramtype metrics: ~azure.mgmt.storage.models.ObjectReplicationPolicyPropertiesMetrics - :keyword priority_replication: Optional. The object replication policy priority replication - feature options. - :paramtype priority_replication: - ~azure.mgmt.storage.models.ObjectReplicationPolicyPropertiesPriorityReplication - """ - super().__init__(**kwargs) - self.policy_id: Optional[str] = None - self.enabled_time: Optional[datetime.datetime] = None - self.source_account = source_account - self.destination_account = destination_account - self.rules = rules - self.metrics = metrics - self.priority_replication = priority_replication - - -class ObjectReplicationPolicyFilter(_serialization.Model): - """Filters limit replication to a subset of blobs within the storage account. A logical OR is - performed on values in the filter. If multiple filters are defined, a logical AND is performed - on all filters. - - :ivar prefix_match: Optional. Filters the results to replicate only blobs whose names begin - with the specified prefix. - :vartype prefix_match: list[str] - :ivar min_creation_time: Blobs created after the time will be replicated to the destination. 
It - must be in datetime format 'yyyy-MM-ddTHH:mm:ssZ'. Example: 2020-02-19T16:05:00Z. - :vartype min_creation_time: str - """ - - _attribute_map = { - "prefix_match": {"key": "prefixMatch", "type": "[str]"}, - "min_creation_time": {"key": "minCreationTime", "type": "str"}, - } - - def __init__( - self, *, prefix_match: Optional[list[str]] = None, min_creation_time: Optional[str] = None, **kwargs: Any - ) -> None: - """ - :keyword prefix_match: Optional. Filters the results to replicate only blobs whose names begin - with the specified prefix. - :paramtype prefix_match: list[str] - :keyword min_creation_time: Blobs created after the time will be replicated to the destination. - It must be in datetime format 'yyyy-MM-ddTHH:mm:ssZ'. Example: 2020-02-19T16:05:00Z. - :paramtype min_creation_time: str - """ - super().__init__(**kwargs) - self.prefix_match = prefix_match - self.min_creation_time = min_creation_time - - -class ObjectReplicationPolicyPropertiesMetrics(_serialization.Model): - """Optional. The object replication policy metrics feature options. - - :ivar enabled: Indicates whether object replication metrics feature is enabled for the policy. - :vartype enabled: bool - """ - - _attribute_map = { - "enabled": {"key": "enabled", "type": "bool"}, - } - - def __init__(self, *, enabled: Optional[bool] = None, **kwargs: Any) -> None: - """ - :keyword enabled: Indicates whether object replication metrics feature is enabled for the - policy. - :paramtype enabled: bool - """ - super().__init__(**kwargs) - self.enabled = enabled - - -class ObjectReplicationPolicyPropertiesPriorityReplication(_serialization.Model): # pylint: disable=name-too-long - """Optional. The object replication policy priority replication feature options. - - :ivar enabled: Indicates whether object replication priority replication feature is enabled for - the policy. 
- :vartype enabled: bool - """ - - _attribute_map = { - "enabled": {"key": "enabled", "type": "bool"}, - } - - def __init__(self, *, enabled: Optional[bool] = None, **kwargs: Any) -> None: - """ - :keyword enabled: Indicates whether object replication priority replication feature is enabled - for the policy. - :paramtype enabled: bool - """ - super().__init__(**kwargs) - self.enabled = enabled - - -class ObjectReplicationPolicyRule(_serialization.Model): - """The replication policy rule between two containers. - - All required parameters must be populated in order to send to server. - - :ivar rule_id: Rule Id is auto-generated for each new rule on destination account. It is - required for put policy on source account. - :vartype rule_id: str - :ivar source_container: Required. Source container name. Required. - :vartype source_container: str - :ivar destination_container: Required. Destination container name. Required. - :vartype destination_container: str - :ivar filters: Optional. An object that defines the filter set. - :vartype filters: ~azure.mgmt.storage.models.ObjectReplicationPolicyFilter - """ - - _validation = { - "source_container": {"required": True}, - "destination_container": {"required": True}, - } - - _attribute_map = { - "rule_id": {"key": "ruleId", "type": "str"}, - "source_container": {"key": "sourceContainer", "type": "str"}, - "destination_container": {"key": "destinationContainer", "type": "str"}, - "filters": {"key": "filters", "type": "ObjectReplicationPolicyFilter"}, - } - - def __init__( - self, - *, - source_container: str, - destination_container: str, - rule_id: Optional[str] = None, - filters: Optional["_models.ObjectReplicationPolicyFilter"] = None, - **kwargs: Any - ) -> None: - """ - :keyword rule_id: Rule Id is auto-generated for each new rule on destination account. It is - required for put policy on source account. - :paramtype rule_id: str - :keyword source_container: Required. Source container name. Required. 
- :paramtype source_container: str - :keyword destination_container: Required. Destination container name. Required. - :paramtype destination_container: str - :keyword filters: Optional. An object that defines the filter set. - :paramtype filters: ~azure.mgmt.storage.models.ObjectReplicationPolicyFilter - """ - super().__init__(**kwargs) - self.rule_id = rule_id - self.source_container = source_container - self.destination_container = destination_container - self.filters = filters - - -class Operation(_serialization.Model): - """Storage REST API operation definition. - - :ivar name: Operation name: {provider}/{resource}/{operation}. - :vartype name: str - :ivar display: Display metadata associated with the operation. - :vartype display: ~azure.mgmt.storage.models.OperationDisplay - :ivar origin: The origin of operations. - :vartype origin: str - :ivar service_specification: One property of operation, include metric specifications. - :vartype service_specification: ~azure.mgmt.storage.models.ServiceSpecification - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "display": {"key": "display", "type": "OperationDisplay"}, - "origin": {"key": "origin", "type": "str"}, - "service_specification": {"key": "properties.serviceSpecification", "type": "ServiceSpecification"}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - display: Optional["_models.OperationDisplay"] = None, - origin: Optional[str] = None, - service_specification: Optional["_models.ServiceSpecification"] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: Operation name: {provider}/{resource}/{operation}. - :paramtype name: str - :keyword display: Display metadata associated with the operation. - :paramtype display: ~azure.mgmt.storage.models.OperationDisplay - :keyword origin: The origin of operations. - :paramtype origin: str - :keyword service_specification: One property of operation, include metric specifications. 
- :paramtype service_specification: ~azure.mgmt.storage.models.ServiceSpecification - """ - super().__init__(**kwargs) - self.name = name - self.display = display - self.origin = origin - self.service_specification = service_specification - - -class OperationDisplay(_serialization.Model): - """Display metadata associated with the operation. - - :ivar provider: Service provider: Microsoft Storage. - :vartype provider: str - :ivar resource: Resource on which the operation is performed etc. - :vartype resource: str - :ivar operation: Type of operation: get, read, delete, etc. - :vartype operation: str - :ivar description: Description of the operation. - :vartype description: str - """ - - _attribute_map = { - "provider": {"key": "provider", "type": "str"}, - "resource": {"key": "resource", "type": "str"}, - "operation": {"key": "operation", "type": "str"}, - "description": {"key": "description", "type": "str"}, - } - - def __init__( - self, - *, - provider: Optional[str] = None, - resource: Optional[str] = None, - operation: Optional[str] = None, - description: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword provider: Service provider: Microsoft Storage. - :paramtype provider: str - :keyword resource: Resource on which the operation is performed etc. - :paramtype resource: str - :keyword operation: Type of operation: get, read, delete, etc. - :paramtype operation: str - :keyword description: Description of the operation. - :paramtype description: str - """ - super().__init__(**kwargs) - self.provider = provider - self.resource = resource - self.operation = operation - self.description = description - - -class OperationListResult(_serialization.Model): - """Result of the request to list Storage operations. It contains a list of operations and a URL - link to get the next set of results. - - :ivar value: List of Storage operations supported by the Storage resource provider. 
- :vartype value: list[~azure.mgmt.storage.models.Operation] - """ - - _attribute_map = { - "value": {"key": "value", "type": "[Operation]"}, - } - - def __init__(self, *, value: Optional[list["_models.Operation"]] = None, **kwargs: Any) -> None: - """ - :keyword value: List of Storage operations supported by the Storage resource provider. - :paramtype value: list[~azure.mgmt.storage.models.Operation] - """ - super().__init__(**kwargs) - self.value = value - - -class PermissionScope(_serialization.Model): - """PermissionScope. - - All required parameters must be populated in order to send to server. - - :ivar permissions: The permissions for the local user. Possible values include: Read (r), Write - (w), Delete (d), List (l), Create (c), Modify Ownership (o), and Modify Permissions (p). - Required. - :vartype permissions: str - :ivar service: The service used by the local user, e.g. blob, file. Required. - :vartype service: str - :ivar resource_name: The name of resource, normally the container name or the file share name, - used by the local user. Required. - :vartype resource_name: str - """ - - _validation = { - "permissions": {"required": True}, - "service": {"required": True}, - "resource_name": {"required": True}, - } - - _attribute_map = { - "permissions": {"key": "permissions", "type": "str"}, - "service": {"key": "service", "type": "str"}, - "resource_name": {"key": "resourceName", "type": "str"}, - } - - def __init__(self, *, permissions: str, service: str, resource_name: str, **kwargs: Any) -> None: - """ - :keyword permissions: The permissions for the local user. Possible values include: Read (r), - Write (w), Delete (d), List (l), Create (c), Modify Ownership (o), and Modify Permissions (p). - Required. - :paramtype permissions: str - :keyword service: The service used by the local user, e.g. blob, file. Required. 
- :paramtype service: str - :keyword resource_name: The name of resource, normally the container name or the file share - name, used by the local user. Required. - :paramtype resource_name: str - """ - super().__init__(**kwargs) - self.permissions = permissions - self.service = service - self.resource_name = resource_name - - -class Placement(_serialization.Model): - """The complex type of the zonal placement details. - - :ivar zone_placement_policy: The availability zone pinning policy for the storage account. - Known values are: "Any" and "None". - :vartype zone_placement_policy: str or ~azure.mgmt.storage.models.ZonePlacementPolicy - """ - - _attribute_map = { - "zone_placement_policy": {"key": "zonePlacementPolicy", "type": "str"}, - } - - def __init__( - self, *, zone_placement_policy: Optional[Union[str, "_models.ZonePlacementPolicy"]] = None, **kwargs: Any - ) -> None: - """ - :keyword zone_placement_policy: The availability zone pinning policy for the storage account. - Known values are: "Any" and "None". - :paramtype zone_placement_policy: str or ~azure.mgmt.storage.models.ZonePlacementPolicy - """ - super().__init__(**kwargs) - self.zone_placement_policy = zone_placement_policy - - -class PrivateEndpoint(_serialization.Model): - """The Private Endpoint resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The ARM identifier for Private Endpoint. - :vartype id: str - """ - - _validation = { - "id": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.id: Optional[str] = None - - -class PrivateEndpointConnection(Resource): - """The Private Endpoint Connection resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. 
Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar private_endpoint: The resource of private end point. - :vartype private_endpoint: ~azure.mgmt.storage.models.PrivateEndpoint - :ivar private_link_service_connection_state: A collection of information about the state of the - connection between service consumer and provider. - :vartype private_link_service_connection_state: - ~azure.mgmt.storage.models.PrivateLinkServiceConnectionState - :ivar provisioning_state: The provisioning state of the private endpoint connection resource. - Known values are: "Succeeded", "Creating", "Deleting", and "Failed". - :vartype provisioning_state: str or - ~azure.mgmt.storage.models.PrivateEndpointConnectionProvisioningState - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "provisioning_state": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "private_endpoint": {"key": "properties.privateEndpoint", "type": "PrivateEndpoint"}, - "private_link_service_connection_state": { - "key": "properties.privateLinkServiceConnectionState", - "type": "PrivateLinkServiceConnectionState", - }, - "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, - } - - def __init__( - self, - *, - private_endpoint: Optional["_models.PrivateEndpoint"] = None, - private_link_service_connection_state: Optional["_models.PrivateLinkServiceConnectionState"] = None, - **kwargs: Any - ) -> None: - """ - :keyword private_endpoint: The resource of private end point. 
- :paramtype private_endpoint: ~azure.mgmt.storage.models.PrivateEndpoint - :keyword private_link_service_connection_state: A collection of information about the state of - the connection between service consumer and provider. - :paramtype private_link_service_connection_state: - ~azure.mgmt.storage.models.PrivateLinkServiceConnectionState - """ - super().__init__(**kwargs) - self.private_endpoint = private_endpoint - self.private_link_service_connection_state = private_link_service_connection_state - self.provisioning_state: Optional[Union[str, "_models.PrivateEndpointConnectionProvisioningState"]] = None - - -class PrivateEndpointConnectionListResult(_serialization.Model): - """List of private endpoint connection associated with the specified storage account. - - :ivar value: Array of private endpoint connections. - :vartype value: list[~azure.mgmt.storage.models.PrivateEndpointConnection] - """ - - _attribute_map = { - "value": {"key": "value", "type": "[PrivateEndpointConnection]"}, - } - - def __init__(self, *, value: Optional[list["_models.PrivateEndpointConnection"]] = None, **kwargs: Any) -> None: - """ - :keyword value: Array of private endpoint connections. - :paramtype value: list[~azure.mgmt.storage.models.PrivateEndpointConnection] - """ - super().__init__(**kwargs) - self.value = value - - -class PrivateLinkResource(Resource): - """A private link resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar group_id: The private link resource group id. 
- :vartype group_id: str - :ivar required_members: The private link resource required member names. - :vartype required_members: list[str] - :ivar required_zone_names: The private link resource Private link DNS zone name. - :vartype required_zone_names: list[str] - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "group_id": {"readonly": True}, - "required_members": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "group_id": {"key": "properties.groupId", "type": "str"}, - "required_members": {"key": "properties.requiredMembers", "type": "[str]"}, - "required_zone_names": {"key": "properties.requiredZoneNames", "type": "[str]"}, - } - - def __init__(self, *, required_zone_names: Optional[list[str]] = None, **kwargs: Any) -> None: - """ - :keyword required_zone_names: The private link resource Private link DNS zone name. - :paramtype required_zone_names: list[str] - """ - super().__init__(**kwargs) - self.group_id: Optional[str] = None - self.required_members: Optional[list[str]] = None - self.required_zone_names = required_zone_names - - -class PrivateLinkResourceListResult(_serialization.Model): - """A list of private link resources. - - :ivar value: Array of private link resources. - :vartype value: list[~azure.mgmt.storage.models.PrivateLinkResource] - """ - - _attribute_map = { - "value": {"key": "value", "type": "[PrivateLinkResource]"}, - } - - def __init__(self, *, value: Optional[list["_models.PrivateLinkResource"]] = None, **kwargs: Any) -> None: - """ - :keyword value: Array of private link resources. 
- :paramtype value: list[~azure.mgmt.storage.models.PrivateLinkResource] - """ - super().__init__(**kwargs) - self.value = value - - -class PrivateLinkServiceConnectionState(_serialization.Model): - """A collection of information about the state of the connection between service consumer and - provider. - - :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner - of the service. Known values are: "Pending", "Approved", and "Rejected". - :vartype status: str or ~azure.mgmt.storage.models.PrivateEndpointServiceConnectionStatus - :ivar description: The reason for approval/rejection of the connection. - :vartype description: str - :ivar action_required: A message indicating if changes on the service provider require any - updates on the consumer. - :vartype action_required: str - """ - - _attribute_map = { - "status": {"key": "status", "type": "str"}, - "description": {"key": "description", "type": "str"}, - "action_required": {"key": "actionRequired", "type": "str"}, - } - - def __init__( - self, - *, - status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None, - description: Optional[str] = None, - action_required: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword status: Indicates whether the connection has been Approved/Rejected/Removed by the - owner of the service. Known values are: "Pending", "Approved", and "Rejected". - :paramtype status: str or ~azure.mgmt.storage.models.PrivateEndpointServiceConnectionStatus - :keyword description: The reason for approval/rejection of the connection. - :paramtype description: str - :keyword action_required: A message indicating if changes on the service provider require any - updates on the consumer. 
- :paramtype action_required: str - """ - super().__init__(**kwargs) - self.status = status - self.description = description - self.action_required = action_required - - -class ProtectedAppendWritesHistory(_serialization.Model): - """Protected append writes history setting for the blob container with Legal holds. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar allow_protected_append_writes_all: When enabled, new blocks can be written to both - 'Append and Bock Blobs' while maintaining legal hold protection and compliance. Only new blocks - can be added and any existing blocks cannot be modified or deleted. - :vartype allow_protected_append_writes_all: bool - :ivar timestamp: Returns the date and time the tag was added. - :vartype timestamp: ~datetime.datetime - """ - - _validation = { - "timestamp": {"readonly": True}, - } - - _attribute_map = { - "allow_protected_append_writes_all": {"key": "allowProtectedAppendWritesAll", "type": "bool"}, - "timestamp": {"key": "timestamp", "type": "iso-8601"}, - } - - def __init__(self, *, allow_protected_append_writes_all: Optional[bool] = None, **kwargs: Any) -> None: - """ - :keyword allow_protected_append_writes_all: When enabled, new blocks can be written to both - 'Append and Bock Blobs' while maintaining legal hold protection and compliance. Only new blocks - can be added and any existing blocks cannot be modified or deleted. - :paramtype allow_protected_append_writes_all: bool - """ - super().__init__(**kwargs) - self.allow_protected_append_writes_all = allow_protected_append_writes_all - self.timestamp: Optional[datetime.datetime] = None - - -class ProtocolSettings(_serialization.Model): - """Protocol settings for file service. - - :ivar smb: Setting for SMB protocol. - :vartype smb: ~azure.mgmt.storage.models.SmbSetting - :ivar nfs: Setting for NFS protocol. 
- :vartype nfs: ~azure.mgmt.storage.models.NfsSetting - """ - - _attribute_map = { - "smb": {"key": "smb", "type": "SmbSetting"}, - "nfs": {"key": "nfs", "type": "NfsSetting"}, - } - - def __init__( - self, *, smb: Optional["_models.SmbSetting"] = None, nfs: Optional["_models.NfsSetting"] = None, **kwargs: Any - ) -> None: - """ - :keyword smb: Setting for SMB protocol. - :paramtype smb: ~azure.mgmt.storage.models.SmbSetting - :keyword nfs: Setting for NFS protocol. - :paramtype nfs: ~azure.mgmt.storage.models.NfsSetting - """ - super().__init__(**kwargs) - self.smb = smb - self.nfs = nfs - - -class ProvisioningIssue(_serialization.Model): - """Describes provisioning issue for given NetworkSecurityPerimeterConfiguration. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: Name of the issue. - :vartype name: str - :ivar properties: Properties of provisioning issue. - :vartype properties: ~azure.mgmt.storage.models.ProvisioningIssueProperties - """ - - _validation = { - "properties": {"readonly": True}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "properties": {"key": "properties", "type": "ProvisioningIssueProperties"}, - } - - def __init__(self, *, name: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword name: Name of the issue. - :paramtype name: str - """ - super().__init__(**kwargs) - self.name = name - self.properties: Optional["_models.ProvisioningIssueProperties"] = None - - -class ProvisioningIssueProperties(_serialization.Model): - """Properties of provisioning issue. - - :ivar issue_type: Type of issue. Known values are: "Unknown" and - "ConfigurationPropagationFailure". - :vartype issue_type: str or ~azure.mgmt.storage.models.IssueType - :ivar severity: Severity of the issue. Known values are: "Warning" and "Error". - :vartype severity: str or ~azure.mgmt.storage.models.Severity - :ivar description: Description of the issue. 
- :vartype description: str - """ - - _attribute_map = { - "issue_type": {"key": "issueType", "type": "str"}, - "severity": {"key": "severity", "type": "str"}, - "description": {"key": "description", "type": "str"}, - } - - def __init__( - self, - *, - issue_type: Optional[Union[str, "_models.IssueType"]] = None, - severity: Optional[Union[str, "_models.Severity"]] = None, - description: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword issue_type: Type of issue. Known values are: "Unknown" and - "ConfigurationPropagationFailure". - :paramtype issue_type: str or ~azure.mgmt.storage.models.IssueType - :keyword severity: Severity of the issue. Known values are: "Warning" and "Error". - :paramtype severity: str or ~azure.mgmt.storage.models.Severity - :keyword description: Description of the issue. - :paramtype description: str - """ - super().__init__(**kwargs) - self.issue_type = issue_type - self.severity = severity - self.description = description - - -class QueueServiceProperties(Resource): - """The properties of a storage account’s Queue service. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar cors: Specifies CORS rules for the Queue service. You can include up to five CorsRule - elements in the request. If no CorsRule elements are included in the request body, all CORS - rules will be deleted, and CORS will be disabled for the Queue service. 
- :vartype cors: ~azure.mgmt.storage.models.CorsRules - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "cors": {"key": "properties.cors", "type": "CorsRules"}, - } - - def __init__(self, *, cors: Optional["_models.CorsRules"] = None, **kwargs: Any) -> None: - """ - :keyword cors: Specifies CORS rules for the Queue service. You can include up to five CorsRule - elements in the request. If no CorsRule elements are included in the request body, all CORS - rules will be deleted, and CORS will be disabled for the Queue service. - :paramtype cors: ~azure.mgmt.storage.models.CorsRules - """ - super().__init__(**kwargs) - self.cors = cors - - -class ResourceAccessRule(_serialization.Model): - """Resource Access Rule. - - :ivar tenant_id: Tenant Id. - :vartype tenant_id: str - :ivar resource_id: Resource Id. - :vartype resource_id: str - """ - - _attribute_map = { - "tenant_id": {"key": "tenantId", "type": "str"}, - "resource_id": {"key": "resourceId", "type": "str"}, - } - - def __init__(self, *, tenant_id: Optional[str] = None, resource_id: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword tenant_id: Tenant Id. - :paramtype tenant_id: str - :keyword resource_id: Resource Id. - :paramtype resource_id: str - """ - super().__init__(**kwargs) - self.tenant_id = tenant_id - self.resource_id = resource_id - - -class RestorePolicyProperties(_serialization.Model): - """The blob service properties for blob restore policy. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar enabled: Blob restore is enabled if set to true. Required. - :vartype enabled: bool - :ivar days: how long this blob can be restored. 
It should be great than zero and less than - DeleteRetentionPolicy.days. - :vartype days: int - :ivar last_enabled_time: Deprecated in favor of minRestoreTime property. - :vartype last_enabled_time: ~datetime.datetime - :ivar min_restore_time: Returns the minimum date and time that the restore can be started. - :vartype min_restore_time: ~datetime.datetime - """ - - _validation = { - "enabled": {"required": True}, - "days": {"maximum": 365, "minimum": 1}, - "last_enabled_time": {"readonly": True}, - "min_restore_time": {"readonly": True}, - } - - _attribute_map = { - "enabled": {"key": "enabled", "type": "bool"}, - "days": {"key": "days", "type": "int"}, - "last_enabled_time": {"key": "lastEnabledTime", "type": "iso-8601"}, - "min_restore_time": {"key": "minRestoreTime", "type": "iso-8601"}, - } - - def __init__(self, *, enabled: bool, days: Optional[int] = None, **kwargs: Any) -> None: - """ - :keyword enabled: Blob restore is enabled if set to true. Required. - :paramtype enabled: bool - :keyword days: how long this blob can be restored. It should be great than zero and less than - DeleteRetentionPolicy.days. - :paramtype days: int - """ - super().__init__(**kwargs) - self.enabled = enabled - self.days = days - self.last_enabled_time: Optional[datetime.datetime] = None - self.min_restore_time: Optional[datetime.datetime] = None - - -class Restriction(_serialization.Model): - """The restriction because of which SKU cannot be used. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar type: The type of restrictions. As of now only possible value for this is location. - :vartype type: str - :ivar values: The value of restrictions. If the restriction type is set to location. This would - be different locations where the SKU is restricted. - :vartype values: list[str] - :ivar reason_code: The reason for the restriction. As of now this can be "QuotaId" or - "NotAvailableForSubscription". 
Quota Id is set when the SKU has requiredQuotas parameter as the - subscription does not belong to that quota. The "NotAvailableForSubscription" is related to - capacity at DC. Known values are: "QuotaId" and "NotAvailableForSubscription". - :vartype reason_code: str or ~azure.mgmt.storage.models.ReasonCode - """ - - _validation = { - "type": {"readonly": True}, - "values": {"readonly": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "values": {"key": "values", "type": "[str]"}, - "reason_code": {"key": "reasonCode", "type": "str"}, - } - - def __init__(self, *, reason_code: Optional[Union[str, "_models.ReasonCode"]] = None, **kwargs: Any) -> None: - """ - :keyword reason_code: The reason for the restriction. As of now this can be "QuotaId" or - "NotAvailableForSubscription". Quota Id is set when the SKU has requiredQuotas parameter as the - subscription does not belong to that quota. The "NotAvailableForSubscription" is related to - capacity at DC. Known values are: "QuotaId" and "NotAvailableForSubscription". - :paramtype reason_code: str or ~azure.mgmt.storage.models.ReasonCode - """ - super().__init__(**kwargs) - self.type: Optional[str] = None - self.values: Optional[list[str]] = None - self.reason_code = reason_code - - -class RoutingPreference(_serialization.Model): - """Routing preference defines the type of network, either microsoft or internet routing to be used - to deliver the user data, the default option is microsoft routing. - - :ivar routing_choice: Routing Choice defines the kind of network routing opted by the user. - Known values are: "MicrosoftRouting" and "InternetRouting". - :vartype routing_choice: str or ~azure.mgmt.storage.models.RoutingChoice - :ivar publish_microsoft_endpoints: A boolean flag which indicates whether microsoft routing - storage endpoints are to be published. 
- :vartype publish_microsoft_endpoints: bool - :ivar publish_internet_endpoints: A boolean flag which indicates whether internet routing - storage endpoints are to be published. - :vartype publish_internet_endpoints: bool - """ - - _attribute_map = { - "routing_choice": {"key": "routingChoice", "type": "str"}, - "publish_microsoft_endpoints": {"key": "publishMicrosoftEndpoints", "type": "bool"}, - "publish_internet_endpoints": {"key": "publishInternetEndpoints", "type": "bool"}, - } - - def __init__( - self, - *, - routing_choice: Optional[Union[str, "_models.RoutingChoice"]] = None, - publish_microsoft_endpoints: Optional[bool] = None, - publish_internet_endpoints: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword routing_choice: Routing Choice defines the kind of network routing opted by the user. - Known values are: "MicrosoftRouting" and "InternetRouting". - :paramtype routing_choice: str or ~azure.mgmt.storage.models.RoutingChoice - :keyword publish_microsoft_endpoints: A boolean flag which indicates whether microsoft routing - storage endpoints are to be published. - :paramtype publish_microsoft_endpoints: bool - :keyword publish_internet_endpoints: A boolean flag which indicates whether internet routing - storage endpoints are to be published. - :paramtype publish_internet_endpoints: bool - """ - super().__init__(**kwargs) - self.routing_choice = routing_choice - self.publish_microsoft_endpoints = publish_microsoft_endpoints - self.publish_internet_endpoints = publish_internet_endpoints - - -class SasPolicy(_serialization.Model): - """SasPolicy assigned to the storage account. - - All required parameters must be populated in order to send to server. - - :ivar sas_expiration_period: The SAS expiration period, DD.HH:MM:SS. Required. - :vartype sas_expiration_period: str - :ivar expiration_action: The SAS Expiration Action defines the action to be performed when - sasPolicy.sasExpirationPeriod is violated. 
The 'Log' action can be used for audit purposes and - the 'Block' action can be used to block and deny the usage of SAS tokens that do not adhere to - the sas policy expiration period. Known values are: "Log" and "Block". - :vartype expiration_action: str or ~azure.mgmt.storage.models.ExpirationAction - """ - - _validation = { - "sas_expiration_period": {"required": True}, - "expiration_action": {"required": True}, - } - - _attribute_map = { - "sas_expiration_period": {"key": "sasExpirationPeriod", "type": "str"}, - "expiration_action": {"key": "expirationAction", "type": "str"}, - } - - def __init__( - self, - *, - sas_expiration_period: str, - expiration_action: Union[str, "_models.ExpirationAction"] = "Log", - **kwargs: Any - ) -> None: - """ - :keyword sas_expiration_period: The SAS expiration period, DD.HH:MM:SS. Required. - :paramtype sas_expiration_period: str - :keyword expiration_action: The SAS Expiration Action defines the action to be performed when - sasPolicy.sasExpirationPeriod is violated. The 'Log' action can be used for audit purposes and - the 'Block' action can be used to block and deny the usage of SAS tokens that do not adhere to - the sas policy expiration period. Known values are: "Log" and "Block". - :paramtype expiration_action: str or ~azure.mgmt.storage.models.ExpirationAction - """ - super().__init__(**kwargs) - self.sas_expiration_period = sas_expiration_period - self.expiration_action = expiration_action - - -class ServiceSasParameters(_serialization.Model): - """The parameters to list service SAS credentials of a specific resource. - - All required parameters must be populated in order to send to server. - - :ivar canonicalized_resource: The canonical path to the signed resource. Required. - :vartype canonicalized_resource: str - :ivar resource: The signed services accessible with the service SAS. Possible values include: - Blob (b), Container (c), File (f), Share (s). Known values are: "b", "c", "f", and "s". 
- :vartype resource: str or ~azure.mgmt.storage.models.SignedResource - :ivar permissions: The signed permissions for the service SAS. Possible values include: Read - (r), Write (w), Delete (d), List (l), Add (a), Create (c), Update (u) and Process (p). Known - values are: "r", "d", "w", "l", "a", "c", "u", and "p". - :vartype permissions: str or ~azure.mgmt.storage.models.Permissions - :ivar ip_address_or_range: An IP address or a range of IP addresses from which to accept - requests. - :vartype ip_address_or_range: str - :ivar protocols: The protocol permitted for a request made with the account SAS. Known values - are: "https,http" and "https". - :vartype protocols: str or ~azure.mgmt.storage.models.HttpProtocol - :ivar shared_access_start_time: The time at which the SAS becomes valid. - :vartype shared_access_start_time: ~datetime.datetime - :ivar shared_access_expiry_time: The time at which the shared access signature becomes invalid. - :vartype shared_access_expiry_time: ~datetime.datetime - :ivar identifier: A unique value up to 64 characters in length that correlates to an access - policy specified for the container, queue, or table. - :vartype identifier: str - :ivar partition_key_start: The start of partition key. - :vartype partition_key_start: str - :ivar partition_key_end: The end of partition key. - :vartype partition_key_end: str - :ivar row_key_start: The start of row key. - :vartype row_key_start: str - :ivar row_key_end: The end of row key. - :vartype row_key_end: str - :ivar key_to_sign: The key to sign the account SAS token with. - :vartype key_to_sign: str - :ivar cache_control: The response header override for cache control. - :vartype cache_control: str - :ivar content_disposition: The response header override for content disposition. - :vartype content_disposition: str - :ivar content_encoding: The response header override for content encoding. 
- :vartype content_encoding: str - :ivar content_language: The response header override for content language. - :vartype content_language: str - :ivar content_type: The response header override for content type. - :vartype content_type: str - """ - - _validation = { - "canonicalized_resource": {"required": True}, - "identifier": {"max_length": 64}, - } - - _attribute_map = { - "canonicalized_resource": {"key": "canonicalizedResource", "type": "str"}, - "resource": {"key": "signedResource", "type": "str"}, - "permissions": {"key": "signedPermission", "type": "str"}, - "ip_address_or_range": {"key": "signedIp", "type": "str"}, - "protocols": {"key": "signedProtocol", "type": "str"}, - "shared_access_start_time": {"key": "signedStart", "type": "iso-8601"}, - "shared_access_expiry_time": {"key": "signedExpiry", "type": "iso-8601"}, - "identifier": {"key": "signedIdentifier", "type": "str"}, - "partition_key_start": {"key": "startPk", "type": "str"}, - "partition_key_end": {"key": "endPk", "type": "str"}, - "row_key_start": {"key": "startRk", "type": "str"}, - "row_key_end": {"key": "endRk", "type": "str"}, - "key_to_sign": {"key": "keyToSign", "type": "str"}, - "cache_control": {"key": "rscc", "type": "str"}, - "content_disposition": {"key": "rscd", "type": "str"}, - "content_encoding": {"key": "rsce", "type": "str"}, - "content_language": {"key": "rscl", "type": "str"}, - "content_type": {"key": "rsct", "type": "str"}, - } - - def __init__( - self, - *, - canonicalized_resource: str, - resource: Optional[Union[str, "_models.SignedResource"]] = None, - permissions: Optional[Union[str, "_models.Permissions"]] = None, - ip_address_or_range: Optional[str] = None, - protocols: Optional[Union[str, "_models.HttpProtocol"]] = None, - shared_access_start_time: Optional[datetime.datetime] = None, - shared_access_expiry_time: Optional[datetime.datetime] = None, - identifier: Optional[str] = None, - partition_key_start: Optional[str] = None, - partition_key_end: Optional[str] = 
None, - row_key_start: Optional[str] = None, - row_key_end: Optional[str] = None, - key_to_sign: Optional[str] = None, - cache_control: Optional[str] = None, - content_disposition: Optional[str] = None, - content_encoding: Optional[str] = None, - content_language: Optional[str] = None, - content_type: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword canonicalized_resource: The canonical path to the signed resource. Required. - :paramtype canonicalized_resource: str - :keyword resource: The signed services accessible with the service SAS. Possible values - include: Blob (b), Container (c), File (f), Share (s). Known values are: "b", "c", "f", and - "s". - :paramtype resource: str or ~azure.mgmt.storage.models.SignedResource - :keyword permissions: The signed permissions for the service SAS. Possible values include: Read - (r), Write (w), Delete (d), List (l), Add (a), Create (c), Update (u) and Process (p). Known - values are: "r", "d", "w", "l", "a", "c", "u", and "p". - :paramtype permissions: str or ~azure.mgmt.storage.models.Permissions - :keyword ip_address_or_range: An IP address or a range of IP addresses from which to accept - requests. - :paramtype ip_address_or_range: str - :keyword protocols: The protocol permitted for a request made with the account SAS. Known - values are: "https,http" and "https". - :paramtype protocols: str or ~azure.mgmt.storage.models.HttpProtocol - :keyword shared_access_start_time: The time at which the SAS becomes valid. - :paramtype shared_access_start_time: ~datetime.datetime - :keyword shared_access_expiry_time: The time at which the shared access signature becomes - invalid. - :paramtype shared_access_expiry_time: ~datetime.datetime - :keyword identifier: A unique value up to 64 characters in length that correlates to an access - policy specified for the container, queue, or table. - :paramtype identifier: str - :keyword partition_key_start: The start of partition key. 
- :paramtype partition_key_start: str - :keyword partition_key_end: The end of partition key. - :paramtype partition_key_end: str - :keyword row_key_start: The start of row key. - :paramtype row_key_start: str - :keyword row_key_end: The end of row key. - :paramtype row_key_end: str - :keyword key_to_sign: The key to sign the account SAS token with. - :paramtype key_to_sign: str - :keyword cache_control: The response header override for cache control. - :paramtype cache_control: str - :keyword content_disposition: The response header override for content disposition. - :paramtype content_disposition: str - :keyword content_encoding: The response header override for content encoding. - :paramtype content_encoding: str - :keyword content_language: The response header override for content language. - :paramtype content_language: str - :keyword content_type: The response header override for content type. - :paramtype content_type: str - """ - super().__init__(**kwargs) - self.canonicalized_resource = canonicalized_resource - self.resource = resource - self.permissions = permissions - self.ip_address_or_range = ip_address_or_range - self.protocols = protocols - self.shared_access_start_time = shared_access_start_time - self.shared_access_expiry_time = shared_access_expiry_time - self.identifier = identifier - self.partition_key_start = partition_key_start - self.partition_key_end = partition_key_end - self.row_key_start = row_key_start - self.row_key_end = row_key_end - self.key_to_sign = key_to_sign - self.cache_control = cache_control - self.content_disposition = content_disposition - self.content_encoding = content_encoding - self.content_language = content_language - self.content_type = content_type - - -class ServiceSpecification(_serialization.Model): - """One property of operation, include metric specifications. - - :ivar metric_specifications: Metric specifications of operation. 
- :vartype metric_specifications: list[~azure.mgmt.storage.models.MetricSpecification] - """ - - _attribute_map = { - "metric_specifications": {"key": "metricSpecifications", "type": "[MetricSpecification]"}, - } - - def __init__( - self, *, metric_specifications: Optional[list["_models.MetricSpecification"]] = None, **kwargs: Any - ) -> None: - """ - :keyword metric_specifications: Metric specifications of operation. - :paramtype metric_specifications: list[~azure.mgmt.storage.models.MetricSpecification] - """ - super().__init__(**kwargs) - self.metric_specifications = metric_specifications - - -class SignedIdentifier(_serialization.Model): - """SignedIdentifier. - - :ivar id: An unique identifier of the stored access policy. - :vartype id: str - :ivar access_policy: Access policy. - :vartype access_policy: ~azure.mgmt.storage.models.AccessPolicy - """ - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "access_policy": {"key": "accessPolicy", "type": "AccessPolicy"}, - } - - def __init__( - self, - *, - id: Optional[str] = None, # pylint: disable=redefined-builtin - access_policy: Optional["_models.AccessPolicy"] = None, - **kwargs: Any - ) -> None: - """ - :keyword id: An unique identifier of the stored access policy. - :paramtype id: str - :keyword access_policy: Access policy. - :paramtype access_policy: ~azure.mgmt.storage.models.AccessPolicy - """ - super().__init__(**kwargs) - self.id = id - self.access_policy = access_policy - - -class Sku(_serialization.Model): - """The SKU of the storage account. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar name: The SKU name. Required for account creation; optional for update. Note that in - older versions, SKU name was called accountType. Required. 
Known values are: "Standard_LRS", - "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", - "Standard_GZRS", "Standard_RAGZRS", "StandardV2_LRS", "StandardV2_GRS", "StandardV2_ZRS", - "StandardV2_GZRS", "PremiumV2_LRS", and "PremiumV2_ZRS". - :vartype name: str or ~azure.mgmt.storage.models.SkuName - :ivar tier: The SKU tier. This is based on the SKU name. Known values are: "Standard" and - "Premium". - :vartype tier: str or ~azure.mgmt.storage.models.SkuTier - """ - - _validation = { - "name": {"required": True}, - "tier": {"readonly": True}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "tier": {"key": "tier", "type": "str"}, - } - - def __init__(self, *, name: Union[str, "_models.SkuName"], **kwargs: Any) -> None: - """ - :keyword name: The SKU name. Required for account creation; optional for update. Note that in - older versions, SKU name was called accountType. Required. Known values are: "Standard_LRS", - "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", - "Standard_GZRS", "Standard_RAGZRS", "StandardV2_LRS", "StandardV2_GRS", "StandardV2_ZRS", - "StandardV2_GZRS", "PremiumV2_LRS", and "PremiumV2_ZRS". - :paramtype name: str or ~azure.mgmt.storage.models.SkuName - """ - super().__init__(**kwargs) - self.name = name - self.tier: Optional[Union[str, "_models.SkuTier"]] = None - - -class SKUCapability(_serialization.Model): - """The capability information in the specified SKU, including file encryption, network ACLs, - change notification, etc. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: The name of capability, The capability information in the specified SKU, including - file encryption, network ACLs, change notification, etc. - :vartype name: str - :ivar value: A string value to indicate states of given capability. Possibly 'true' or 'false'. 
- :vartype value: str - """ - - _validation = { - "name": {"readonly": True}, - "value": {"readonly": True}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "value": {"key": "value", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.name: Optional[str] = None - self.value: Optional[str] = None - - -class SkuInformation(_serialization.Model): - """Storage SKU and its properties. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar name: The SKU name. Required for account creation; optional for update. Note that in - older versions, SKU name was called accountType. Required. Known values are: "Standard_LRS", - "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", - "Standard_GZRS", "Standard_RAGZRS", "StandardV2_LRS", "StandardV2_GRS", "StandardV2_ZRS", - "StandardV2_GZRS", "PremiumV2_LRS", and "PremiumV2_ZRS". - :vartype name: str or ~azure.mgmt.storage.models.SkuName - :ivar tier: The SKU tier. This is based on the SKU name. Known values are: "Standard" and - "Premium". - :vartype tier: str or ~azure.mgmt.storage.models.SkuTier - :ivar resource_type: The type of the resource, usually it is 'storageAccounts'. - :vartype resource_type: str - :ivar kind: Indicates the type of storage account. Known values are: "Storage", "StorageV2", - "BlobStorage", "FileStorage", and "BlockBlobStorage". - :vartype kind: str or ~azure.mgmt.storage.models.Kind - :ivar locations: The set of locations that the SKU is available. This will be supported and - registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). 
- :vartype locations: list[str] - :ivar location_info: - :vartype location_info: list[~azure.mgmt.storage.models.SkuInformationLocationInfoItem] - :ivar capabilities: The capability information in the specified SKU, including file encryption, - network ACLs, change notification, etc. - :vartype capabilities: list[~azure.mgmt.storage.models.SKUCapability] - :ivar restrictions: The restrictions because of which SKU cannot be used. This is empty if - there are no restrictions. - :vartype restrictions: list[~azure.mgmt.storage.models.Restriction] - """ - - _validation = { - "name": {"required": True}, - "tier": {"readonly": True}, - "resource_type": {"readonly": True}, - "kind": {"readonly": True}, - "locations": {"readonly": True}, - "capabilities": {"readonly": True}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "tier": {"key": "tier", "type": "str"}, - "resource_type": {"key": "resourceType", "type": "str"}, - "kind": {"key": "kind", "type": "str"}, - "locations": {"key": "locations", "type": "[str]"}, - "location_info": {"key": "locationInfo", "type": "[SkuInformationLocationInfoItem]"}, - "capabilities": {"key": "capabilities", "type": "[SKUCapability]"}, - "restrictions": {"key": "restrictions", "type": "[Restriction]"}, - } - - def __init__( - self, - *, - name: Union[str, "_models.SkuName"], - location_info: Optional[list["_models.SkuInformationLocationInfoItem"]] = None, - restrictions: Optional[list["_models.Restriction"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: The SKU name. Required for account creation; optional for update. Note that in - older versions, SKU name was called accountType. Required. Known values are: "Standard_LRS", - "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", - "Standard_GZRS", "Standard_RAGZRS", "StandardV2_LRS", "StandardV2_GRS", "StandardV2_ZRS", - "StandardV2_GZRS", "PremiumV2_LRS", and "PremiumV2_ZRS". 
- :paramtype name: str or ~azure.mgmt.storage.models.SkuName - :keyword location_info: - :paramtype location_info: list[~azure.mgmt.storage.models.SkuInformationLocationInfoItem] - :keyword restrictions: The restrictions because of which SKU cannot be used. This is empty if - there are no restrictions. - :paramtype restrictions: list[~azure.mgmt.storage.models.Restriction] - """ - super().__init__(**kwargs) - self.name = name - self.tier: Optional[Union[str, "_models.SkuTier"]] = None - self.resource_type: Optional[str] = None - self.kind: Optional[Union[str, "_models.Kind"]] = None - self.locations: Optional[list[str]] = None - self.location_info = location_info - self.capabilities: Optional[list["_models.SKUCapability"]] = None - self.restrictions = restrictions - - -class SkuInformationLocationInfoItem(_serialization.Model): - """SkuInformationLocationInfoItem. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar location: Describes the location for the product where storage account resource can be - created. - :vartype location: str - :ivar zones: Describes the available zones for the product where storage account resource can - be created. - :vartype zones: list[str] - """ - - _validation = { - "location": {"readonly": True}, - "zones": {"readonly": True}, - } - - _attribute_map = { - "location": {"key": "location", "type": "str"}, - "zones": {"key": "zones", "type": "[str]"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.location: Optional[str] = None - self.zones: Optional[list[str]] = None - - -class SmbOAuthSettings(_serialization.Model): - """Setting property for Managed Identity access over SMB using OAuth. - - :ivar is_smb_o_auth_enabled: Specifies if managed identities can access SMB shares using OAuth. - The default interpretation is false for this property. 
- :vartype is_smb_o_auth_enabled: bool - """ - - _attribute_map = { - "is_smb_o_auth_enabled": {"key": "isSmbOAuthEnabled", "type": "bool"}, - } - - def __init__(self, *, is_smb_o_auth_enabled: Optional[bool] = None, **kwargs: Any) -> None: - """ - :keyword is_smb_o_auth_enabled: Specifies if managed identities can access SMB shares using - OAuth. The default interpretation is false for this property. - :paramtype is_smb_o_auth_enabled: bool - """ - super().__init__(**kwargs) - self.is_smb_o_auth_enabled = is_smb_o_auth_enabled - - -class SmbSetting(_serialization.Model): - """Setting for SMB protocol. - - :ivar multichannel: Multichannel setting. Applies to Premium FileStorage only. - :vartype multichannel: ~azure.mgmt.storage.models.Multichannel - :ivar versions: SMB protocol versions supported by server. Valid values are SMB2.1, SMB3.0, - SMB3.1.1. Should be passed as a string with delimiter ';'. - :vartype versions: str - :ivar authentication_methods: SMB authentication methods supported by server. Valid values are - NTLMv2, Kerberos. Should be passed as a string with delimiter ';'. - :vartype authentication_methods: str - :ivar kerberos_ticket_encryption: Kerberos ticket encryption supported by server. Valid values - are RC4-HMAC, AES-256. Should be passed as a string with delimiter ';'. - :vartype kerberos_ticket_encryption: str - :ivar channel_encryption: SMB channel encryption supported by server. Valid values are - AES-128-CCM, AES-128-GCM, AES-256-GCM. Should be passed as a string with delimiter ';'. - :vartype channel_encryption: str - :ivar encryption_in_transit: Encryption in transit setting. 
- :vartype encryption_in_transit: ~azure.mgmt.storage.models.EncryptionInTransit - """ - - _attribute_map = { - "multichannel": {"key": "multichannel", "type": "Multichannel"}, - "versions": {"key": "versions", "type": "str"}, - "authentication_methods": {"key": "authenticationMethods", "type": "str"}, - "kerberos_ticket_encryption": {"key": "kerberosTicketEncryption", "type": "str"}, - "channel_encryption": {"key": "channelEncryption", "type": "str"}, - "encryption_in_transit": {"key": "encryptionInTransit", "type": "EncryptionInTransit"}, - } - - def __init__( - self, - *, - multichannel: Optional["_models.Multichannel"] = None, - versions: Optional[str] = None, - authentication_methods: Optional[str] = None, - kerberos_ticket_encryption: Optional[str] = None, - channel_encryption: Optional[str] = None, - encryption_in_transit: Optional["_models.EncryptionInTransit"] = None, - **kwargs: Any - ) -> None: - """ - :keyword multichannel: Multichannel setting. Applies to Premium FileStorage only. - :paramtype multichannel: ~azure.mgmt.storage.models.Multichannel - :keyword versions: SMB protocol versions supported by server. Valid values are SMB2.1, SMB3.0, - SMB3.1.1. Should be passed as a string with delimiter ';'. - :paramtype versions: str - :keyword authentication_methods: SMB authentication methods supported by server. Valid values - are NTLMv2, Kerberos. Should be passed as a string with delimiter ';'. - :paramtype authentication_methods: str - :keyword kerberos_ticket_encryption: Kerberos ticket encryption supported by server. Valid - values are RC4-HMAC, AES-256. Should be passed as a string with delimiter ';'. - :paramtype kerberos_ticket_encryption: str - :keyword channel_encryption: SMB channel encryption supported by server. Valid values are - AES-128-CCM, AES-128-GCM, AES-256-GCM. Should be passed as a string with delimiter ';'. - :paramtype channel_encryption: str - :keyword encryption_in_transit: Encryption in transit setting. 
- :paramtype encryption_in_transit: ~azure.mgmt.storage.models.EncryptionInTransit - """ - super().__init__(**kwargs) - self.multichannel = multichannel - self.versions = versions - self.authentication_methods = authentication_methods - self.kerberos_ticket_encryption = kerberos_ticket_encryption - self.channel_encryption = channel_encryption - self.encryption_in_transit = encryption_in_transit - - -class SshPublicKey(_serialization.Model): - """SshPublicKey. - - :ivar description: Optional. It is used to store the function/usage of the key. - :vartype description: str - :ivar key: Ssh public key base64 encoded. The format should be: '\\ :code:`` - :code:``', e.g. ssh-rsa AAAABBBB. - :vartype key: str - """ - - _attribute_map = { - "description": {"key": "description", "type": "str"}, - "key": {"key": "key", "type": "str"}, - } - - def __init__(self, *, description: Optional[str] = None, key: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword description: Optional. It is used to store the function/usage of the key. - :paramtype description: str - :keyword key: Ssh public key base64 encoded. The format should be: '\\ :code:`` - :code:``', e.g. ssh-rsa AAAABBBB. - :paramtype key: str - """ - super().__init__(**kwargs) - self.description = description - self.key = key - - -class TrackedResource(Resource): - """The resource model definition for an Azure Resource Manager tracked top level resource which - has 'tags' and a 'location'. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar tags: Resource tags. - :vartype tags: dict[str, str] - :ivar location: The geo-location where the resource lives. Required. - :vartype location: str - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "location": {"required": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "tags": {"key": "tags", "type": "{str}"}, - "location": {"key": "location", "type": "str"}, - } - - def __init__(self, *, location: str, tags: Optional[dict[str, str]] = None, **kwargs: Any) -> None: - """ - :keyword tags: Resource tags. - :paramtype tags: dict[str, str] - :keyword location: The geo-location where the resource lives. Required. - :paramtype location: str - """ - super().__init__(**kwargs) - self.tags = tags - self.location = location - - -class StorageAccount(TrackedResource): - """The storage account. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar tags: Resource tags. - :vartype tags: dict[str, str] - :ivar location: The geo-location where the resource lives. Required. - :vartype location: str - :ivar sku: Gets the SKU. - :vartype sku: ~azure.mgmt.storage.models.Sku - :ivar kind: Gets the Kind. 
Known values are: "Storage", "StorageV2", "BlobStorage", - "FileStorage", and "BlockBlobStorage". - :vartype kind: str or ~azure.mgmt.storage.models.Kind - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.storage.models.Identity - :ivar extended_location: The extendedLocation of the resource. - :vartype extended_location: ~azure.mgmt.storage.models.ExtendedLocation - :ivar zones: Optional. Gets or sets the pinned logical availability zone for the storage - account. - :vartype zones: list[str] - :ivar placement: Optional. Gets or sets the zonal placement details for the storage account. - :vartype placement: ~azure.mgmt.storage.models.Placement - :ivar provisioning_state: Gets the status of the storage account at the time the operation was - called. Known values are: "Creating", "ResolvingDNS", "Succeeded", - "ValidateSubscriptionQuotaBegin", "ValidateSubscriptionQuotaEnd", "Accepted", "Deleting", - "Canceled", and "Failed". - :vartype provisioning_state: str or ~azure.mgmt.storage.models.ProvisioningState - :ivar primary_endpoints: Gets the URLs that are used to perform a retrieval of a public blob, - queue, or table object. Note that Standard_ZRS and Premium_LRS accounts only return the blob - endpoint. - :vartype primary_endpoints: ~azure.mgmt.storage.models.Endpoints - :ivar primary_location: Gets the location of the primary data center for the storage account. - :vartype primary_location: str - :ivar status_of_primary: Gets the status indicating whether the primary location of the storage - account is available or unavailable. Known values are: "available" and "unavailable". - :vartype status_of_primary: str or ~azure.mgmt.storage.models.AccountStatus - :ivar last_geo_failover_time: Gets the timestamp of the most recent instance of a failover to - the secondary location. Only the most recent timestamp is retained. This element is not - returned if there has never been a failover instance. 
Only available if the accountType is - Standard_GRS or Standard_RAGRS. - :vartype last_geo_failover_time: ~datetime.datetime - :ivar secondary_location: Gets the location of the geo-replicated secondary for the storage - account. Only available if the accountType is Standard_GRS or Standard_RAGRS. - :vartype secondary_location: str - :ivar status_of_secondary: Gets the status indicating whether the secondary location of the - storage account is available or unavailable. Only available if the SKU name is Standard_GRS or - Standard_RAGRS. Known values are: "available" and "unavailable". - :vartype status_of_secondary: str or ~azure.mgmt.storage.models.AccountStatus - :ivar creation_time: Gets the creation date and time of the storage account in UTC. - :vartype creation_time: ~datetime.datetime - :ivar custom_domain: Gets the custom domain the user assigned to this storage account. - :vartype custom_domain: ~azure.mgmt.storage.models.CustomDomain - :ivar sas_policy: SasPolicy assigned to the storage account. - :vartype sas_policy: ~azure.mgmt.storage.models.SasPolicy - :ivar key_policy: KeyPolicy assigned to the storage account. - :vartype key_policy: ~azure.mgmt.storage.models.KeyPolicy - :ivar key_creation_time: Storage account keys creation time. - :vartype key_creation_time: ~azure.mgmt.storage.models.KeyCreationTime - :ivar secondary_endpoints: Gets the URLs that are used to perform a retrieval of a public blob, - queue, or table object from the secondary location of the storage account. Only available if - the SKU name is Standard_RAGRS. - :vartype secondary_endpoints: ~azure.mgmt.storage.models.Endpoints - :ivar encryption: Encryption settings to be used for server-side encryption for the storage - account. - :vartype encryption: ~azure.mgmt.storage.models.Encryption - :ivar access_tier: Required for storage accounts where kind = BlobStorage. The access tier is - used for billing. 
The 'Premium' access tier is the default value for premium block blobs - storage account type and it cannot be changed for the premium block blobs storage account type. - Known values are: "Hot", "Cool", "Premium", and "Cold". - :vartype access_tier: str or ~azure.mgmt.storage.models.AccessTier - :ivar azure_files_identity_based_authentication: Provides the identity based authentication - settings for Azure Files. - :vartype azure_files_identity_based_authentication: - ~azure.mgmt.storage.models.AzureFilesIdentityBasedAuthentication - :ivar enable_https_traffic_only: Allows https traffic only to storage service if sets to true. - :vartype enable_https_traffic_only: bool - :ivar network_rule_set: Network rule set. - :vartype network_rule_set: ~azure.mgmt.storage.models.NetworkRuleSet - :ivar is_sftp_enabled: Enables Secure File Transfer Protocol, if set to true. - :vartype is_sftp_enabled: bool - :ivar is_local_user_enabled: Enables local users feature, if set to true. - :vartype is_local_user_enabled: bool - :ivar enable_extended_groups: Enables extended group support with local users feature, if set - to true. - :vartype enable_extended_groups: bool - :ivar is_hns_enabled: Account HierarchicalNamespace enabled if sets to true. - :vartype is_hns_enabled: bool - :ivar geo_replication_stats: Geo Replication Stats. - :vartype geo_replication_stats: ~azure.mgmt.storage.models.GeoReplicationStats - :ivar failover_in_progress: If the failover is in progress, the value will be true, otherwise, - it will be null. - :vartype failover_in_progress: bool - :ivar large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be - disabled once it is enabled. Known values are: "Disabled" and "Enabled". - :vartype large_file_shares_state: str or ~azure.mgmt.storage.models.LargeFileSharesState - :ivar private_endpoint_connections: List of private endpoint connection associated with the - specified storage account. 
- :vartype private_endpoint_connections: - list[~azure.mgmt.storage.models.PrivateEndpointConnection] - :ivar routing_preference: Maintains information about the network routing choice opted by the - user for data transfer. - :vartype routing_preference: ~azure.mgmt.storage.models.RoutingPreference - :ivar dual_stack_endpoint_preference: Maintains information about the Internet protocol opted - by the user. - :vartype dual_stack_endpoint_preference: ~azure.mgmt.storage.models.DualStackEndpointPreference - :ivar blob_restore_status: Blob restore status. - :vartype blob_restore_status: ~azure.mgmt.storage.models.BlobRestoreStatus - :ivar allow_blob_public_access: Allow or disallow public access to all blobs or containers in - the storage account. The default interpretation is false for this property. - :vartype allow_blob_public_access: bool - :ivar minimum_tls_version: Set the minimum TLS version to be permitted on requests to storage. - The default interpretation is TLS 1.0 for this property. Known values are: "TLS1_0", "TLS1_1", - "TLS1_2", and "TLS1_3". - :vartype minimum_tls_version: str or ~azure.mgmt.storage.models.MinimumTlsVersion - :ivar allow_shared_key_access: Indicates whether the storage account permits requests to be - authorized with the account access key via Shared Key. If false, then all requests, including - shared access signatures, must be authorized with Azure Active Directory (Azure AD). The - default value is null, which is equivalent to true. - :vartype allow_shared_key_access: bool - :ivar enable_nfs_v3: NFS 3.0 protocol support enabled if set to true. - :vartype enable_nfs_v3: bool - :ivar allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. - Set this property to true for new or existing accounts only if object replication policies will - involve storage accounts in different AAD tenants. The default interpretation is false for new - accounts to follow best security practices by default. 
- :vartype allow_cross_tenant_replication: bool - :ivar default_to_o_auth_authentication: A boolean flag which indicates whether the default - authentication is OAuth or not. The default interpretation is false for this property. - :vartype default_to_o_auth_authentication: bool - :ivar public_network_access: Allow, disallow, or let Network Security Perimeter configuration - to evaluate public network access to Storage Account. Known values are: "Enabled", "Disabled", - and "SecuredByPerimeter". - :vartype public_network_access: str or ~azure.mgmt.storage.models.PublicNetworkAccess - :ivar immutable_storage_with_versioning: The property is immutable and can only be set to true - at the account creation time. When set to true, it enables object level immutability for all - the containers in the account by default. - :vartype immutable_storage_with_versioning: ~azure.mgmt.storage.models.ImmutableStorageAccount - :ivar allowed_copy_scope: Restrict copy to and from Storage Accounts within an AAD tenant or - with Private Links to the same VNet. Known values are: "PrivateLink" and "AAD". - :vartype allowed_copy_scope: str or ~azure.mgmt.storage.models.AllowedCopyScope - :ivar storage_account_sku_conversion_status: This property is readOnly and is set by server - during asynchronous storage account sku conversion operations. - :vartype storage_account_sku_conversion_status: - ~azure.mgmt.storage.models.StorageAccountSkuConversionStatus - :ivar dns_endpoint_type: Allows you to specify the type of endpoint. Set this to AzureDNSZone - to create a large number of accounts in a single subscription, which creates accounts in an - Azure DNS Zone and the endpoint URL will have an alphanumeric DNS Zone identifier. Known values - are: "Standard" and "AzureDnsZone". - :vartype dns_endpoint_type: str or ~azure.mgmt.storage.models.DnsEndpointType - :ivar is_sku_conversion_blocked: This property will be set to true or false on an event of - ongoing migration. Default value is null. 
- :vartype is_sku_conversion_blocked: bool - :ivar account_migration_in_progress: If customer initiated account migration is in progress, - the value will be true else it will be null. - :vartype account_migration_in_progress: bool - :ivar geo_priority_replication_status: Status indicating whether Geo Priority Replication is - enabled for the account. - :vartype geo_priority_replication_status: - ~azure.mgmt.storage.models.GeoPriorityReplicationStatus - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "location": {"required": True}, - "sku": {"readonly": True}, - "kind": {"readonly": True}, - "provisioning_state": {"readonly": True}, - "primary_endpoints": {"readonly": True}, - "primary_location": {"readonly": True}, - "status_of_primary": {"readonly": True}, - "last_geo_failover_time": {"readonly": True}, - "secondary_location": {"readonly": True}, - "status_of_secondary": {"readonly": True}, - "creation_time": {"readonly": True}, - "custom_domain": {"readonly": True}, - "sas_policy": {"readonly": True}, - "key_policy": {"readonly": True}, - "key_creation_time": {"readonly": True}, - "secondary_endpoints": {"readonly": True}, - "encryption": {"readonly": True}, - "access_tier": {"readonly": True}, - "network_rule_set": {"readonly": True}, - "geo_replication_stats": {"readonly": True}, - "failover_in_progress": {"readonly": True}, - "private_endpoint_connections": {"readonly": True}, - "blob_restore_status": {"readonly": True}, - "is_sku_conversion_blocked": {"readonly": True}, - "account_migration_in_progress": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "tags": {"key": "tags", "type": "{str}"}, - "location": {"key": "location", "type": "str"}, - "sku": {"key": "sku", "type": "Sku"}, - "kind": {"key": "kind", "type": "str"}, - "identity": {"key": "identity", "type": 
"Identity"}, - "extended_location": {"key": "extendedLocation", "type": "ExtendedLocation"}, - "zones": {"key": "zones", "type": "[str]"}, - "placement": {"key": "placement", "type": "Placement"}, - "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, - "primary_endpoints": {"key": "properties.primaryEndpoints", "type": "Endpoints"}, - "primary_location": {"key": "properties.primaryLocation", "type": "str"}, - "status_of_primary": {"key": "properties.statusOfPrimary", "type": "str"}, - "last_geo_failover_time": {"key": "properties.lastGeoFailoverTime", "type": "iso-8601"}, - "secondary_location": {"key": "properties.secondaryLocation", "type": "str"}, - "status_of_secondary": {"key": "properties.statusOfSecondary", "type": "str"}, - "creation_time": {"key": "properties.creationTime", "type": "iso-8601"}, - "custom_domain": {"key": "properties.customDomain", "type": "CustomDomain"}, - "sas_policy": {"key": "properties.sasPolicy", "type": "SasPolicy"}, - "key_policy": {"key": "properties.keyPolicy", "type": "KeyPolicy"}, - "key_creation_time": {"key": "properties.keyCreationTime", "type": "KeyCreationTime"}, - "secondary_endpoints": {"key": "properties.secondaryEndpoints", "type": "Endpoints"}, - "encryption": {"key": "properties.encryption", "type": "Encryption"}, - "access_tier": {"key": "properties.accessTier", "type": "str"}, - "azure_files_identity_based_authentication": { - "key": "properties.azureFilesIdentityBasedAuthentication", - "type": "AzureFilesIdentityBasedAuthentication", - }, - "enable_https_traffic_only": {"key": "properties.supportsHttpsTrafficOnly", "type": "bool"}, - "network_rule_set": {"key": "properties.networkAcls", "type": "NetworkRuleSet"}, - "is_sftp_enabled": {"key": "properties.isSftpEnabled", "type": "bool"}, - "is_local_user_enabled": {"key": "properties.isLocalUserEnabled", "type": "bool"}, - "enable_extended_groups": {"key": "properties.enableExtendedGroups", "type": "bool"}, - "is_hns_enabled": {"key": 
"properties.isHnsEnabled", "type": "bool"}, - "geo_replication_stats": {"key": "properties.geoReplicationStats", "type": "GeoReplicationStats"}, - "failover_in_progress": {"key": "properties.failoverInProgress", "type": "bool"}, - "large_file_shares_state": {"key": "properties.largeFileSharesState", "type": "str"}, - "private_endpoint_connections": { - "key": "properties.privateEndpointConnections", - "type": "[PrivateEndpointConnection]", - }, - "routing_preference": {"key": "properties.routingPreference", "type": "RoutingPreference"}, - "dual_stack_endpoint_preference": { - "key": "properties.dualStackEndpointPreference", - "type": "DualStackEndpointPreference", - }, - "blob_restore_status": {"key": "properties.blobRestoreStatus", "type": "BlobRestoreStatus"}, - "allow_blob_public_access": {"key": "properties.allowBlobPublicAccess", "type": "bool"}, - "minimum_tls_version": {"key": "properties.minimumTlsVersion", "type": "str"}, - "allow_shared_key_access": {"key": "properties.allowSharedKeyAccess", "type": "bool"}, - "enable_nfs_v3": {"key": "properties.isNfsV3Enabled", "type": "bool"}, - "allow_cross_tenant_replication": {"key": "properties.allowCrossTenantReplication", "type": "bool"}, - "default_to_o_auth_authentication": {"key": "properties.defaultToOAuthAuthentication", "type": "bool"}, - "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, - "immutable_storage_with_versioning": { - "key": "properties.immutableStorageWithVersioning", - "type": "ImmutableStorageAccount", - }, - "allowed_copy_scope": {"key": "properties.allowedCopyScope", "type": "str"}, - "storage_account_sku_conversion_status": { - "key": "properties.storageAccountSkuConversionStatus", - "type": "StorageAccountSkuConversionStatus", - }, - "dns_endpoint_type": {"key": "properties.dnsEndpointType", "type": "str"}, - "is_sku_conversion_blocked": {"key": "properties.isSkuConversionBlocked", "type": "bool"}, - "account_migration_in_progress": {"key": 
"properties.accountMigrationInProgress", "type": "bool"}, - "geo_priority_replication_status": { - "key": "properties.geoPriorityReplicationStatus", - "type": "GeoPriorityReplicationStatus", - }, - } - - def __init__( # pylint: disable=too-many-locals - self, - *, - location: str, - tags: Optional[dict[str, str]] = None, - identity: Optional["_models.Identity"] = None, - extended_location: Optional["_models.ExtendedLocation"] = None, - zones: Optional[list[str]] = None, - placement: Optional["_models.Placement"] = None, - azure_files_identity_based_authentication: Optional["_models.AzureFilesIdentityBasedAuthentication"] = None, - enable_https_traffic_only: Optional[bool] = None, - is_sftp_enabled: Optional[bool] = None, - is_local_user_enabled: Optional[bool] = None, - enable_extended_groups: Optional[bool] = None, - is_hns_enabled: Optional[bool] = None, - large_file_shares_state: Optional[Union[str, "_models.LargeFileSharesState"]] = None, - routing_preference: Optional["_models.RoutingPreference"] = None, - dual_stack_endpoint_preference: Optional["_models.DualStackEndpointPreference"] = None, - allow_blob_public_access: Optional[bool] = None, - minimum_tls_version: Optional[Union[str, "_models.MinimumTlsVersion"]] = None, - allow_shared_key_access: Optional[bool] = None, - enable_nfs_v3: Optional[bool] = None, - allow_cross_tenant_replication: Optional[bool] = None, - default_to_o_auth_authentication: Optional[bool] = None, - public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, - immutable_storage_with_versioning: Optional["_models.ImmutableStorageAccount"] = None, - allowed_copy_scope: Optional[Union[str, "_models.AllowedCopyScope"]] = None, - storage_account_sku_conversion_status: Optional["_models.StorageAccountSkuConversionStatus"] = None, - dns_endpoint_type: Optional[Union[str, "_models.DnsEndpointType"]] = None, - geo_priority_replication_status: Optional["_models.GeoPriorityReplicationStatus"] = None, - **kwargs: Any - ) 
-> None: - """ - :keyword tags: Resource tags. - :paramtype tags: dict[str, str] - :keyword location: The geo-location where the resource lives. Required. - :paramtype location: str - :keyword identity: The identity of the resource. - :paramtype identity: ~azure.mgmt.storage.models.Identity - :keyword extended_location: The extendedLocation of the resource. - :paramtype extended_location: ~azure.mgmt.storage.models.ExtendedLocation - :keyword zones: Optional. Gets or sets the pinned logical availability zone for the storage - account. - :paramtype zones: list[str] - :keyword placement: Optional. Gets or sets the zonal placement details for the storage account. - :paramtype placement: ~azure.mgmt.storage.models.Placement - :keyword azure_files_identity_based_authentication: Provides the identity based authentication - settings for Azure Files. - :paramtype azure_files_identity_based_authentication: - ~azure.mgmt.storage.models.AzureFilesIdentityBasedAuthentication - :keyword enable_https_traffic_only: Allows https traffic only to storage service if sets to - true. - :paramtype enable_https_traffic_only: bool - :keyword is_sftp_enabled: Enables Secure File Transfer Protocol, if set to true. - :paramtype is_sftp_enabled: bool - :keyword is_local_user_enabled: Enables local users feature, if set to true. - :paramtype is_local_user_enabled: bool - :keyword enable_extended_groups: Enables extended group support with local users feature, if - set to true. - :paramtype enable_extended_groups: bool - :keyword is_hns_enabled: Account HierarchicalNamespace enabled if sets to true. - :paramtype is_hns_enabled: bool - :keyword large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be - disabled once it is enabled. Known values are: "Disabled" and "Enabled". 
- :paramtype large_file_shares_state: str or ~azure.mgmt.storage.models.LargeFileSharesState - :keyword routing_preference: Maintains information about the network routing choice opted by - the user for data transfer. - :paramtype routing_preference: ~azure.mgmt.storage.models.RoutingPreference - :keyword dual_stack_endpoint_preference: Maintains information about the Internet protocol - opted by the user. - :paramtype dual_stack_endpoint_preference: - ~azure.mgmt.storage.models.DualStackEndpointPreference - :keyword allow_blob_public_access: Allow or disallow public access to all blobs or containers - in the storage account. The default interpretation is false for this property. - :paramtype allow_blob_public_access: bool - :keyword minimum_tls_version: Set the minimum TLS version to be permitted on requests to - storage. The default interpretation is TLS 1.0 for this property. Known values are: "TLS1_0", - "TLS1_1", "TLS1_2", and "TLS1_3". - :paramtype minimum_tls_version: str or ~azure.mgmt.storage.models.MinimumTlsVersion - :keyword allow_shared_key_access: Indicates whether the storage account permits requests to be - authorized with the account access key via Shared Key. If false, then all requests, including - shared access signatures, must be authorized with Azure Active Directory (Azure AD). The - default value is null, which is equivalent to true. - :paramtype allow_shared_key_access: bool - :keyword enable_nfs_v3: NFS 3.0 protocol support enabled if set to true. - :paramtype enable_nfs_v3: bool - :keyword allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. - Set this property to true for new or existing accounts only if object replication policies will - involve storage accounts in different AAD tenants. The default interpretation is false for new - accounts to follow best security practices by default. 
- :paramtype allow_cross_tenant_replication: bool - :keyword default_to_o_auth_authentication: A boolean flag which indicates whether the default - authentication is OAuth or not. The default interpretation is false for this property. - :paramtype default_to_o_auth_authentication: bool - :keyword public_network_access: Allow, disallow, or let Network Security Perimeter - configuration to evaluate public network access to Storage Account. Known values are: - "Enabled", "Disabled", and "SecuredByPerimeter". - :paramtype public_network_access: str or ~azure.mgmt.storage.models.PublicNetworkAccess - :keyword immutable_storage_with_versioning: The property is immutable and can only be set to - true at the account creation time. When set to true, it enables object level immutability for - all the containers in the account by default. - :paramtype immutable_storage_with_versioning: - ~azure.mgmt.storage.models.ImmutableStorageAccount - :keyword allowed_copy_scope: Restrict copy to and from Storage Accounts within an AAD tenant or - with Private Links to the same VNet. Known values are: "PrivateLink" and "AAD". - :paramtype allowed_copy_scope: str or ~azure.mgmt.storage.models.AllowedCopyScope - :keyword storage_account_sku_conversion_status: This property is readOnly and is set by server - during asynchronous storage account sku conversion operations. - :paramtype storage_account_sku_conversion_status: - ~azure.mgmt.storage.models.StorageAccountSkuConversionStatus - :keyword dns_endpoint_type: Allows you to specify the type of endpoint. Set this to - AzureDNSZone to create a large number of accounts in a single subscription, which creates - accounts in an Azure DNS Zone and the endpoint URL will have an alphanumeric DNS Zone - identifier. Known values are: "Standard" and "AzureDnsZone". 
- :paramtype dns_endpoint_type: str or ~azure.mgmt.storage.models.DnsEndpointType - :keyword geo_priority_replication_status: Status indicating whether Geo Priority Replication is - enabled for the account. - :paramtype geo_priority_replication_status: - ~azure.mgmt.storage.models.GeoPriorityReplicationStatus - """ - super().__init__(tags=tags, location=location, **kwargs) - self.sku: Optional["_models.Sku"] = None - self.kind: Optional[Union[str, "_models.Kind"]] = None - self.identity = identity - self.extended_location = extended_location - self.zones = zones - self.placement = placement - self.provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None - self.primary_endpoints: Optional["_models.Endpoints"] = None - self.primary_location: Optional[str] = None - self.status_of_primary: Optional[Union[str, "_models.AccountStatus"]] = None - self.last_geo_failover_time: Optional[datetime.datetime] = None - self.secondary_location: Optional[str] = None - self.status_of_secondary: Optional[Union[str, "_models.AccountStatus"]] = None - self.creation_time: Optional[datetime.datetime] = None - self.custom_domain: Optional["_models.CustomDomain"] = None - self.sas_policy: Optional["_models.SasPolicy"] = None - self.key_policy: Optional["_models.KeyPolicy"] = None - self.key_creation_time: Optional["_models.KeyCreationTime"] = None - self.secondary_endpoints: Optional["_models.Endpoints"] = None - self.encryption: Optional["_models.Encryption"] = None - self.access_tier: Optional[Union[str, "_models.AccessTier"]] = None - self.azure_files_identity_based_authentication = azure_files_identity_based_authentication - self.enable_https_traffic_only = enable_https_traffic_only - self.network_rule_set: Optional["_models.NetworkRuleSet"] = None - self.is_sftp_enabled = is_sftp_enabled - self.is_local_user_enabled = is_local_user_enabled - self.enable_extended_groups = enable_extended_groups - self.is_hns_enabled = is_hns_enabled - self.geo_replication_stats: 
Optional["_models.GeoReplicationStats"] = None - self.failover_in_progress: Optional[bool] = None - self.large_file_shares_state = large_file_shares_state - self.private_endpoint_connections: Optional[list["_models.PrivateEndpointConnection"]] = None - self.routing_preference = routing_preference - self.dual_stack_endpoint_preference = dual_stack_endpoint_preference - self.blob_restore_status: Optional["_models.BlobRestoreStatus"] = None - self.allow_blob_public_access = allow_blob_public_access - self.minimum_tls_version = minimum_tls_version - self.allow_shared_key_access = allow_shared_key_access - self.enable_nfs_v3 = enable_nfs_v3 - self.allow_cross_tenant_replication = allow_cross_tenant_replication - self.default_to_o_auth_authentication = default_to_o_auth_authentication - self.public_network_access = public_network_access - self.immutable_storage_with_versioning = immutable_storage_with_versioning - self.allowed_copy_scope = allowed_copy_scope - self.storage_account_sku_conversion_status = storage_account_sku_conversion_status - self.dns_endpoint_type = dns_endpoint_type - self.is_sku_conversion_blocked: Optional[bool] = None - self.account_migration_in_progress: Optional[bool] = None - self.geo_priority_replication_status = geo_priority_replication_status - - -class StorageAccountCheckNameAvailabilityParameters(_serialization.Model): # pylint: disable=name-too-long - """The parameters used to check the availability of the storage account name. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar name: The storage account name. Required. - :vartype name: str - :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Required. Default value is - "Microsoft.Storage/storageAccounts". 
- :vartype type: str - """ - - _validation = { - "name": {"required": True}, - "type": {"required": True, "constant": True}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - } - - type = "Microsoft.Storage/storageAccounts" - - def __init__(self, *, name: str, **kwargs: Any) -> None: - """ - :keyword name: The storage account name. Required. - :paramtype name: str - """ - super().__init__(**kwargs) - self.name = name - - -class StorageAccountCreateParameters(_serialization.Model): - """The parameters used when creating a storage account. - - All required parameters must be populated in order to send to server. - - :ivar sku: Required. Gets or sets the SKU name. Required. - :vartype sku: ~azure.mgmt.storage.models.Sku - :ivar kind: Required. Indicates the type of storage account. Required. Known values are: - "Storage", "StorageV2", "BlobStorage", "FileStorage", and "BlockBlobStorage". - :vartype kind: str or ~azure.mgmt.storage.models.Kind - :ivar location: Required. Gets or sets the location of the resource. This will be one of the - supported and registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). The - geo region of a resource cannot be changed once it is created, but if an identical geo region - is specified on update, the request will succeed. Required. - :vartype location: str - :ivar extended_location: Optional. Set the extended location of the resource. If not set, the - storage account will be created in Azure main region. Otherwise it will be created in the - specified extended location. - :vartype extended_location: ~azure.mgmt.storage.models.ExtendedLocation - :ivar zones: Optional. Gets or sets the pinned logical availability zone for the storage - account. - :vartype zones: list[str] - :ivar placement: Optional. Gets or sets the zonal placement details for the storage account. 
- :vartype placement: ~azure.mgmt.storage.models.Placement - :ivar tags: Gets or sets a list of key value pairs that describe the resource. These tags can - be used for viewing and grouping this resource (across resource groups). A maximum of 15 tags - can be provided for a resource. Each tag must have a key with a length no greater than 128 - characters and a value with a length no greater than 256 characters. - :vartype tags: dict[str, str] - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.storage.models.Identity - :ivar allowed_copy_scope: Restrict copy to and from Storage Accounts within an AAD tenant or - with Private Links to the same VNet. Known values are: "PrivateLink" and "AAD". - :vartype allowed_copy_scope: str or ~azure.mgmt.storage.models.AllowedCopyScope - :ivar public_network_access: Allow, disallow, or let Network Security Perimeter configuration - to evaluate public network access to Storage Account. Value is optional but if passed in, must - be 'Enabled', 'Disabled' or 'SecuredByPerimeter'. Known values are: "Enabled", "Disabled", and - "SecuredByPerimeter". - :vartype public_network_access: str or ~azure.mgmt.storage.models.PublicNetworkAccess - :ivar sas_policy: SasPolicy assigned to the storage account. - :vartype sas_policy: ~azure.mgmt.storage.models.SasPolicy - :ivar key_policy: KeyPolicy assigned to the storage account. - :vartype key_policy: ~azure.mgmt.storage.models.KeyPolicy - :ivar custom_domain: User domain assigned to the storage account. Name is the CNAME source. - Only one custom domain is supported per storage account at this time. To clear the existing - custom domain, use an empty string for the custom domain name property. - :vartype custom_domain: ~azure.mgmt.storage.models.CustomDomain - :ivar encryption: Encryption settings to be used for server-side encryption for the storage - account. - :vartype encryption: ~azure.mgmt.storage.models.Encryption - :ivar network_rule_set: Network rule set. 
- :vartype network_rule_set: ~azure.mgmt.storage.models.NetworkRuleSet - :ivar access_tier: Required for storage accounts where kind = BlobStorage. The access tier is - used for billing. The 'Premium' access tier is the default value for premium block blobs - storage account type and it cannot be changed for the premium block blobs storage account type. - Known values are: "Hot", "Cool", "Premium", and "Cold". - :vartype access_tier: str or ~azure.mgmt.storage.models.AccessTier - :ivar azure_files_identity_based_authentication: Provides the identity based authentication - settings for Azure Files. - :vartype azure_files_identity_based_authentication: - ~azure.mgmt.storage.models.AzureFilesIdentityBasedAuthentication - :ivar enable_https_traffic_only: Allows https traffic only to storage service if sets to true. - The default value is true since API version 2019-04-01. - :vartype enable_https_traffic_only: bool - :ivar is_sftp_enabled: Enables Secure File Transfer Protocol, if set to true. - :vartype is_sftp_enabled: bool - :ivar is_local_user_enabled: Enables local users feature, if set to true. - :vartype is_local_user_enabled: bool - :ivar enable_extended_groups: Enables extended group support with local users feature, if set - to true. - :vartype enable_extended_groups: bool - :ivar is_hns_enabled: Account HierarchicalNamespace enabled if sets to true. - :vartype is_hns_enabled: bool - :ivar large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be - disabled once it is enabled. Known values are: "Disabled" and "Enabled". - :vartype large_file_shares_state: str or ~azure.mgmt.storage.models.LargeFileSharesState - :ivar routing_preference: Maintains information about the network routing choice opted by the - user for data transfer. - :vartype routing_preference: ~azure.mgmt.storage.models.RoutingPreference - :ivar dual_stack_endpoint_preference: Maintains information about the Internet protocol opted - by the user. 
- :vartype dual_stack_endpoint_preference: ~azure.mgmt.storage.models.DualStackEndpointPreference - :ivar allow_blob_public_access: Allow or disallow public access to all blobs or containers in - the storage account. The default interpretation is false for this property. - :vartype allow_blob_public_access: bool - :ivar minimum_tls_version: Set the minimum TLS version to be permitted on requests to storage. - The default interpretation is TLS 1.0 for this property. Known values are: "TLS1_0", "TLS1_1", - "TLS1_2", and "TLS1_3". - :vartype minimum_tls_version: str or ~azure.mgmt.storage.models.MinimumTlsVersion - :ivar allow_shared_key_access: Indicates whether the storage account permits requests to be - authorized with the account access key via Shared Key. If false, then all requests, including - shared access signatures, must be authorized with Azure Active Directory (Azure AD). The - default value is null, which is equivalent to true. - :vartype allow_shared_key_access: bool - :ivar enable_nfs_v3: NFS 3.0 protocol support enabled if set to true. - :vartype enable_nfs_v3: bool - :ivar allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. - Set this property to true for new or existing accounts only if object replication policies will - involve storage accounts in different AAD tenants. The default interpretation is false for new - accounts to follow best security practices by default. - :vartype allow_cross_tenant_replication: bool - :ivar default_to_o_auth_authentication: A boolean flag which indicates whether the default - authentication is OAuth or not. The default interpretation is false for this property. - :vartype default_to_o_auth_authentication: bool - :ivar immutable_storage_with_versioning: The property is immutable and can only be set to true - at the account creation time. When set to true, it enables object level immutability for all - the new containers in the account by default. 
- :vartype immutable_storage_with_versioning: ~azure.mgmt.storage.models.ImmutableStorageAccount - :ivar dns_endpoint_type: Allows you to specify the type of endpoint. Set this to AzureDNSZone - to create a large number of accounts in a single subscription, which creates accounts in an - Azure DNS Zone and the endpoint URL will have an alphanumeric DNS Zone identifier. Known values - are: "Standard" and "AzureDnsZone". - :vartype dns_endpoint_type: str or ~azure.mgmt.storage.models.DnsEndpointType - :ivar geo_priority_replication_status: Status indicating whether Geo Priority Replication is - enabled for the account. - :vartype geo_priority_replication_status: - ~azure.mgmt.storage.models.GeoPriorityReplicationStatus - """ - - _validation = { - "sku": {"required": True}, - "kind": {"required": True}, - "location": {"required": True}, - } - - _attribute_map = { - "sku": {"key": "sku", "type": "Sku"}, - "kind": {"key": "kind", "type": "str"}, - "location": {"key": "location", "type": "str"}, - "extended_location": {"key": "extendedLocation", "type": "ExtendedLocation"}, - "zones": {"key": "zones", "type": "[str]"}, - "placement": {"key": "placement", "type": "Placement"}, - "tags": {"key": "tags", "type": "{str}"}, - "identity": {"key": "identity", "type": "Identity"}, - "allowed_copy_scope": {"key": "properties.allowedCopyScope", "type": "str"}, - "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, - "sas_policy": {"key": "properties.sasPolicy", "type": "SasPolicy"}, - "key_policy": {"key": "properties.keyPolicy", "type": "KeyPolicy"}, - "custom_domain": {"key": "properties.customDomain", "type": "CustomDomain"}, - "encryption": {"key": "properties.encryption", "type": "Encryption"}, - "network_rule_set": {"key": "properties.networkAcls", "type": "NetworkRuleSet"}, - "access_tier": {"key": "properties.accessTier", "type": "str"}, - "azure_files_identity_based_authentication": { - "key": 
"properties.azureFilesIdentityBasedAuthentication", - "type": "AzureFilesIdentityBasedAuthentication", - }, - "enable_https_traffic_only": {"key": "properties.supportsHttpsTrafficOnly", "type": "bool"}, - "is_sftp_enabled": {"key": "properties.isSftpEnabled", "type": "bool"}, - "is_local_user_enabled": {"key": "properties.isLocalUserEnabled", "type": "bool"}, - "enable_extended_groups": {"key": "properties.enableExtendedGroups", "type": "bool"}, - "is_hns_enabled": {"key": "properties.isHnsEnabled", "type": "bool"}, - "large_file_shares_state": {"key": "properties.largeFileSharesState", "type": "str"}, - "routing_preference": {"key": "properties.routingPreference", "type": "RoutingPreference"}, - "dual_stack_endpoint_preference": { - "key": "properties.dualStackEndpointPreference", - "type": "DualStackEndpointPreference", - }, - "allow_blob_public_access": {"key": "properties.allowBlobPublicAccess", "type": "bool"}, - "minimum_tls_version": {"key": "properties.minimumTlsVersion", "type": "str"}, - "allow_shared_key_access": {"key": "properties.allowSharedKeyAccess", "type": "bool"}, - "enable_nfs_v3": {"key": "properties.isNfsV3Enabled", "type": "bool"}, - "allow_cross_tenant_replication": {"key": "properties.allowCrossTenantReplication", "type": "bool"}, - "default_to_o_auth_authentication": {"key": "properties.defaultToOAuthAuthentication", "type": "bool"}, - "immutable_storage_with_versioning": { - "key": "properties.immutableStorageWithVersioning", - "type": "ImmutableStorageAccount", - }, - "dns_endpoint_type": {"key": "properties.dnsEndpointType", "type": "str"}, - "geo_priority_replication_status": { - "key": "properties.geoPriorityReplicationStatus", - "type": "GeoPriorityReplicationStatus", - }, - } - - def __init__( # pylint: disable=too-many-locals - self, - *, - sku: "_models.Sku", - kind: Union[str, "_models.Kind"], - location: str, - extended_location: Optional["_models.ExtendedLocation"] = None, - zones: Optional[list[str]] = None, - placement: 
Optional["_models.Placement"] = None, - tags: Optional[dict[str, str]] = None, - identity: Optional["_models.Identity"] = None, - allowed_copy_scope: Optional[Union[str, "_models.AllowedCopyScope"]] = None, - public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, - sas_policy: Optional["_models.SasPolicy"] = None, - key_policy: Optional["_models.KeyPolicy"] = None, - custom_domain: Optional["_models.CustomDomain"] = None, - encryption: Optional["_models.Encryption"] = None, - network_rule_set: Optional["_models.NetworkRuleSet"] = None, - access_tier: Optional[Union[str, "_models.AccessTier"]] = None, - azure_files_identity_based_authentication: Optional["_models.AzureFilesIdentityBasedAuthentication"] = None, - enable_https_traffic_only: Optional[bool] = None, - is_sftp_enabled: Optional[bool] = None, - is_local_user_enabled: Optional[bool] = None, - enable_extended_groups: Optional[bool] = None, - is_hns_enabled: Optional[bool] = None, - large_file_shares_state: Optional[Union[str, "_models.LargeFileSharesState"]] = None, - routing_preference: Optional["_models.RoutingPreference"] = None, - dual_stack_endpoint_preference: Optional["_models.DualStackEndpointPreference"] = None, - allow_blob_public_access: Optional[bool] = None, - minimum_tls_version: Optional[Union[str, "_models.MinimumTlsVersion"]] = None, - allow_shared_key_access: Optional[bool] = None, - enable_nfs_v3: Optional[bool] = None, - allow_cross_tenant_replication: Optional[bool] = None, - default_to_o_auth_authentication: Optional[bool] = None, - immutable_storage_with_versioning: Optional["_models.ImmutableStorageAccount"] = None, - dns_endpoint_type: Optional[Union[str, "_models.DnsEndpointType"]] = None, - geo_priority_replication_status: Optional["_models.GeoPriorityReplicationStatus"] = None, - **kwargs: Any - ) -> None: - """ - :keyword sku: Required. Gets or sets the SKU name. Required. - :paramtype sku: ~azure.mgmt.storage.models.Sku - :keyword kind: Required. 
Indicates the type of storage account. Required. Known values are: - "Storage", "StorageV2", "BlobStorage", "FileStorage", and "BlockBlobStorage". - :paramtype kind: str or ~azure.mgmt.storage.models.Kind - :keyword location: Required. Gets or sets the location of the resource. This will be one of the - supported and registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). The - geo region of a resource cannot be changed once it is created, but if an identical geo region - is specified on update, the request will succeed. Required. - :paramtype location: str - :keyword extended_location: Optional. Set the extended location of the resource. If not set, - the storage account will be created in Azure main region. Otherwise it will be created in the - specified extended location. - :paramtype extended_location: ~azure.mgmt.storage.models.ExtendedLocation - :keyword zones: Optional. Gets or sets the pinned logical availability zone for the storage - account. - :paramtype zones: list[str] - :keyword placement: Optional. Gets or sets the zonal placement details for the storage account. - :paramtype placement: ~azure.mgmt.storage.models.Placement - :keyword tags: Gets or sets a list of key value pairs that describe the resource. These tags - can be used for viewing and grouping this resource (across resource groups). A maximum of 15 - tags can be provided for a resource. Each tag must have a key with a length no greater than 128 - characters and a value with a length no greater than 256 characters. - :paramtype tags: dict[str, str] - :keyword identity: The identity of the resource. - :paramtype identity: ~azure.mgmt.storage.models.Identity - :keyword allowed_copy_scope: Restrict copy to and from Storage Accounts within an AAD tenant or - with Private Links to the same VNet. Known values are: "PrivateLink" and "AAD". 
- :paramtype allowed_copy_scope: str or ~azure.mgmt.storage.models.AllowedCopyScope - :keyword public_network_access: Allow, disallow, or let Network Security Perimeter - configuration to evaluate public network access to Storage Account. Value is optional but if - passed in, must be 'Enabled', 'Disabled' or 'SecuredByPerimeter'. Known values are: "Enabled", - "Disabled", and "SecuredByPerimeter". - :paramtype public_network_access: str or ~azure.mgmt.storage.models.PublicNetworkAccess - :keyword sas_policy: SasPolicy assigned to the storage account. - :paramtype sas_policy: ~azure.mgmt.storage.models.SasPolicy - :keyword key_policy: KeyPolicy assigned to the storage account. - :paramtype key_policy: ~azure.mgmt.storage.models.KeyPolicy - :keyword custom_domain: User domain assigned to the storage account. Name is the CNAME source. - Only one custom domain is supported per storage account at this time. To clear the existing - custom domain, use an empty string for the custom domain name property. - :paramtype custom_domain: ~azure.mgmt.storage.models.CustomDomain - :keyword encryption: Encryption settings to be used for server-side encryption for the storage - account. - :paramtype encryption: ~azure.mgmt.storage.models.Encryption - :keyword network_rule_set: Network rule set. - :paramtype network_rule_set: ~azure.mgmt.storage.models.NetworkRuleSet - :keyword access_tier: Required for storage accounts where kind = BlobStorage. The access tier - is used for billing. The 'Premium' access tier is the default value for premium block blobs - storage account type and it cannot be changed for the premium block blobs storage account type. - Known values are: "Hot", "Cool", "Premium", and "Cold". - :paramtype access_tier: str or ~azure.mgmt.storage.models.AccessTier - :keyword azure_files_identity_based_authentication: Provides the identity based authentication - settings for Azure Files. 
- :paramtype azure_files_identity_based_authentication: - ~azure.mgmt.storage.models.AzureFilesIdentityBasedAuthentication - :keyword enable_https_traffic_only: Allows https traffic only to storage service if sets to - true. The default value is true since API version 2019-04-01. - :paramtype enable_https_traffic_only: bool - :keyword is_sftp_enabled: Enables Secure File Transfer Protocol, if set to true. - :paramtype is_sftp_enabled: bool - :keyword is_local_user_enabled: Enables local users feature, if set to true. - :paramtype is_local_user_enabled: bool - :keyword enable_extended_groups: Enables extended group support with local users feature, if - set to true. - :paramtype enable_extended_groups: bool - :keyword is_hns_enabled: Account HierarchicalNamespace enabled if sets to true. - :paramtype is_hns_enabled: bool - :keyword large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be - disabled once it is enabled. Known values are: "Disabled" and "Enabled". - :paramtype large_file_shares_state: str or ~azure.mgmt.storage.models.LargeFileSharesState - :keyword routing_preference: Maintains information about the network routing choice opted by - the user for data transfer. - :paramtype routing_preference: ~azure.mgmt.storage.models.RoutingPreference - :keyword dual_stack_endpoint_preference: Maintains information about the Internet protocol - opted by the user. - :paramtype dual_stack_endpoint_preference: - ~azure.mgmt.storage.models.DualStackEndpointPreference - :keyword allow_blob_public_access: Allow or disallow public access to all blobs or containers - in the storage account. The default interpretation is false for this property. - :paramtype allow_blob_public_access: bool - :keyword minimum_tls_version: Set the minimum TLS version to be permitted on requests to - storage. The default interpretation is TLS 1.0 for this property. Known values are: "TLS1_0", - "TLS1_1", "TLS1_2", and "TLS1_3". 
- :paramtype minimum_tls_version: str or ~azure.mgmt.storage.models.MinimumTlsVersion - :keyword allow_shared_key_access: Indicates whether the storage account permits requests to be - authorized with the account access key via Shared Key. If false, then all requests, including - shared access signatures, must be authorized with Azure Active Directory (Azure AD). The - default value is null, which is equivalent to true. - :paramtype allow_shared_key_access: bool - :keyword enable_nfs_v3: NFS 3.0 protocol support enabled if set to true. - :paramtype enable_nfs_v3: bool - :keyword allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. - Set this property to true for new or existing accounts only if object replication policies will - involve storage accounts in different AAD tenants. The default interpretation is false for new - accounts to follow best security practices by default. - :paramtype allow_cross_tenant_replication: bool - :keyword default_to_o_auth_authentication: A boolean flag which indicates whether the default - authentication is OAuth or not. The default interpretation is false for this property. - :paramtype default_to_o_auth_authentication: bool - :keyword immutable_storage_with_versioning: The property is immutable and can only be set to - true at the account creation time. When set to true, it enables object level immutability for - all the new containers in the account by default. - :paramtype immutable_storage_with_versioning: - ~azure.mgmt.storage.models.ImmutableStorageAccount - :keyword dns_endpoint_type: Allows you to specify the type of endpoint. Set this to - AzureDNSZone to create a large number of accounts in a single subscription, which creates - accounts in an Azure DNS Zone and the endpoint URL will have an alphanumeric DNS Zone - identifier. Known values are: "Standard" and "AzureDnsZone". 
- :paramtype dns_endpoint_type: str or ~azure.mgmt.storage.models.DnsEndpointType - :keyword geo_priority_replication_status: Status indicating whether Geo Priority Replication is - enabled for the account. - :paramtype geo_priority_replication_status: - ~azure.mgmt.storage.models.GeoPriorityReplicationStatus - """ - super().__init__(**kwargs) - self.sku = sku - self.kind = kind - self.location = location - self.extended_location = extended_location - self.zones = zones - self.placement = placement - self.tags = tags - self.identity = identity - self.allowed_copy_scope = allowed_copy_scope - self.public_network_access = public_network_access - self.sas_policy = sas_policy - self.key_policy = key_policy - self.custom_domain = custom_domain - self.encryption = encryption - self.network_rule_set = network_rule_set - self.access_tier = access_tier - self.azure_files_identity_based_authentication = azure_files_identity_based_authentication - self.enable_https_traffic_only = enable_https_traffic_only - self.is_sftp_enabled = is_sftp_enabled - self.is_local_user_enabled = is_local_user_enabled - self.enable_extended_groups = enable_extended_groups - self.is_hns_enabled = is_hns_enabled - self.large_file_shares_state = large_file_shares_state - self.routing_preference = routing_preference - self.dual_stack_endpoint_preference = dual_stack_endpoint_preference - self.allow_blob_public_access = allow_blob_public_access - self.minimum_tls_version = minimum_tls_version - self.allow_shared_key_access = allow_shared_key_access - self.enable_nfs_v3 = enable_nfs_v3 - self.allow_cross_tenant_replication = allow_cross_tenant_replication - self.default_to_o_auth_authentication = default_to_o_auth_authentication - self.immutable_storage_with_versioning = immutable_storage_with_versioning - self.dns_endpoint_type = dns_endpoint_type - self.geo_priority_replication_status = geo_priority_replication_status - - -class StorageAccountInternetEndpoints(_serialization.Model): - """The URIs 
that are used to perform a retrieval of a public blob, file, web or dfs object via a - internet routing endpoint. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar blob: Gets the blob endpoint. - :vartype blob: str - :ivar file: Gets the file endpoint. - :vartype file: str - :ivar web: Gets the web endpoint. - :vartype web: str - :ivar dfs: Gets the dfs endpoint. - :vartype dfs: str - """ - - _validation = { - "blob": {"readonly": True}, - "file": {"readonly": True}, - "web": {"readonly": True}, - "dfs": {"readonly": True}, - } - - _attribute_map = { - "blob": {"key": "blob", "type": "str"}, - "file": {"key": "file", "type": "str"}, - "web": {"key": "web", "type": "str"}, - "dfs": {"key": "dfs", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.blob: Optional[str] = None - self.file: Optional[str] = None - self.web: Optional[str] = None - self.dfs: Optional[str] = None - - -class StorageAccountIpv6Endpoints(_serialization.Model): - """The URIs that are used to perform a retrieval of a public blob, queue, table, web or dfs object - via an IPv6 endpoint. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar blob: Gets the blob endpoint. - :vartype blob: str - :ivar queue: Gets the queue endpoint. - :vartype queue: str - :ivar table: Gets the table endpoint. - :vartype table: str - :ivar file: Gets the file endpoint. - :vartype file: str - :ivar web: Gets the web endpoint. - :vartype web: str - :ivar dfs: Gets the dfs endpoint. - :vartype dfs: str - :ivar microsoft_endpoints: Gets the microsoft routing storage endpoints. - :vartype microsoft_endpoints: ~azure.mgmt.storage.models.StorageAccountMicrosoftEndpoints - :ivar internet_endpoints: Gets the internet routing storage endpoints. 
- :vartype internet_endpoints: ~azure.mgmt.storage.models.StorageAccountInternetEndpoints - """ - - _validation = { - "blob": {"readonly": True}, - "queue": {"readonly": True}, - "table": {"readonly": True}, - "file": {"readonly": True}, - "web": {"readonly": True}, - "dfs": {"readonly": True}, - } - - _attribute_map = { - "blob": {"key": "blob", "type": "str"}, - "queue": {"key": "queue", "type": "str"}, - "table": {"key": "table", "type": "str"}, - "file": {"key": "file", "type": "str"}, - "web": {"key": "web", "type": "str"}, - "dfs": {"key": "dfs", "type": "str"}, - "microsoft_endpoints": {"key": "microsoftEndpoints", "type": "StorageAccountMicrosoftEndpoints"}, - "internet_endpoints": {"key": "internetEndpoints", "type": "StorageAccountInternetEndpoints"}, - } - - def __init__( - self, - *, - microsoft_endpoints: Optional["_models.StorageAccountMicrosoftEndpoints"] = None, - internet_endpoints: Optional["_models.StorageAccountInternetEndpoints"] = None, - **kwargs: Any - ) -> None: - """ - :keyword microsoft_endpoints: Gets the microsoft routing storage endpoints. - :paramtype microsoft_endpoints: ~azure.mgmt.storage.models.StorageAccountMicrosoftEndpoints - :keyword internet_endpoints: Gets the internet routing storage endpoints. - :paramtype internet_endpoints: ~azure.mgmt.storage.models.StorageAccountInternetEndpoints - """ - super().__init__(**kwargs) - self.blob: Optional[str] = None - self.queue: Optional[str] = None - self.table: Optional[str] = None - self.file: Optional[str] = None - self.web: Optional[str] = None - self.dfs: Optional[str] = None - self.microsoft_endpoints = microsoft_endpoints - self.internet_endpoints = internet_endpoints - - -class StorageAccountKey(_serialization.Model): - """An access key for the storage account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar key_name: Name of the key. - :vartype key_name: str - :ivar value: Base 64-encoded value of the key. 
- :vartype value: str - :ivar permissions: Permissions for the key -- read-only or full permissions. Known values are: - "Read" and "Full". - :vartype permissions: str or ~azure.mgmt.storage.models.KeyPermission - :ivar creation_time: Creation time of the key, in round trip date format. - :vartype creation_time: ~datetime.datetime - """ - - _validation = { - "key_name": {"readonly": True}, - "value": {"readonly": True}, - "permissions": {"readonly": True}, - "creation_time": {"readonly": True}, - } - - _attribute_map = { - "key_name": {"key": "keyName", "type": "str"}, - "value": {"key": "value", "type": "str"}, - "permissions": {"key": "permissions", "type": "str"}, - "creation_time": {"key": "creationTime", "type": "iso-8601"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.key_name: Optional[str] = None - self.value: Optional[str] = None - self.permissions: Optional[Union[str, "_models.KeyPermission"]] = None - self.creation_time: Optional[datetime.datetime] = None - - -class StorageAccountListKeysResult(_serialization.Model): - """The response from the ListKeys operation. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar keys: Gets the list of storage account keys and their properties for the specified - storage account. - :vartype keys: list[~azure.mgmt.storage.models.StorageAccountKey] - """ - - _validation = { - "keys": {"readonly": True}, - } - - _attribute_map = { - "keys": {"key": "keys", "type": "[StorageAccountKey]"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.keys: Optional[list["_models.StorageAccountKey"]] = None - - -class StorageAccountListResult(_serialization.Model): - """The response from the List Storage Accounts operation. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: Gets the list of storage accounts and their properties. 
- :vartype value: list[~azure.mgmt.storage.models.StorageAccount] - :ivar next_link: Request URL that can be used to query next page of storage accounts. Returned - when total number of requested storage accounts exceed maximum page size. - :vartype next_link: str - """ - - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[StorageAccount]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.StorageAccount"]] = None - self.next_link: Optional[str] = None - - -class StorageAccountMicrosoftEndpoints(_serialization.Model): - """The URIs that are used to perform a retrieval of a public blob, queue, table, web or dfs object - via a microsoft routing endpoint. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar blob: Gets the blob endpoint. - :vartype blob: str - :ivar queue: Gets the queue endpoint. - :vartype queue: str - :ivar table: Gets the table endpoint. - :vartype table: str - :ivar file: Gets the file endpoint. - :vartype file: str - :ivar web: Gets the web endpoint. - :vartype web: str - :ivar dfs: Gets the dfs endpoint. 
- :vartype dfs: str - """ - - _validation = { - "blob": {"readonly": True}, - "queue": {"readonly": True}, - "table": {"readonly": True}, - "file": {"readonly": True}, - "web": {"readonly": True}, - "dfs": {"readonly": True}, - } - - _attribute_map = { - "blob": {"key": "blob", "type": "str"}, - "queue": {"key": "queue", "type": "str"}, - "table": {"key": "table", "type": "str"}, - "file": {"key": "file", "type": "str"}, - "web": {"key": "web", "type": "str"}, - "dfs": {"key": "dfs", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.blob: Optional[str] = None - self.queue: Optional[str] = None - self.table: Optional[str] = None - self.file: Optional[str] = None - self.web: Optional[str] = None - self.dfs: Optional[str] = None - - -class StorageAccountMigration(_serialization.Model): - """The parameters or status associated with an ongoing or enqueued storage account migration in - order to update its current SKU or region. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar id: Migration Resource Id. - :vartype id: str - :ivar name: current value is 'default' for customer initiated migration. - :vartype name: str - :ivar type: SrpAccountMigrationType in ARM contract which is 'accountMigrations'. - :vartype type: str - :ivar target_sku_name: Target sku name for the account. Required. Known values are: - "Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", - "Standard_GZRS", "Standard_RAGZRS", "StandardV2_LRS", "StandardV2_GRS", "StandardV2_ZRS", - "StandardV2_GZRS", "PremiumV2_LRS", and "PremiumV2_ZRS". - :vartype target_sku_name: str or ~azure.mgmt.storage.models.SkuName - :ivar migration_status: Current status of migration. Known values are: "Invalid", - "SubmittedForConversion", "InProgress", "Complete", and "Failed". 
- :vartype migration_status: str or ~azure.mgmt.storage.models.MigrationStatus - :ivar migration_failed_reason: Error code for migration failure. - :vartype migration_failed_reason: str - :ivar migration_failed_detailed_reason: Reason for migration failure. - :vartype migration_failed_detailed_reason: str - """ - - _validation = { - "id": {"readonly": True}, - "target_sku_name": {"required": True}, - "migration_status": {"readonly": True}, - "migration_failed_reason": {"readonly": True}, - "migration_failed_detailed_reason": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "target_sku_name": {"key": "properties.targetSkuName", "type": "str"}, - "migration_status": {"key": "properties.migrationStatus", "type": "str"}, - "migration_failed_reason": {"key": "properties.migrationFailedReason", "type": "str"}, - "migration_failed_detailed_reason": {"key": "properties.migrationFailedDetailedReason", "type": "str"}, - } - - def __init__( - self, - *, - target_sku_name: Union[str, "_models.SkuName"], - name: Optional[str] = None, - type: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: current value is 'default' for customer initiated migration. - :paramtype name: str - :keyword type: SrpAccountMigrationType in ARM contract which is 'accountMigrations'. - :paramtype type: str - :keyword target_sku_name: Target sku name for the account. Required. Known values are: - "Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", - "Standard_GZRS", "Standard_RAGZRS", "StandardV2_LRS", "StandardV2_GRS", "StandardV2_ZRS", - "StandardV2_GZRS", "PremiumV2_LRS", and "PremiumV2_ZRS". 
- :paramtype target_sku_name: str or ~azure.mgmt.storage.models.SkuName - """ - super().__init__(**kwargs) - self.id: Optional[str] = None - self.name = name - self.type = type - self.target_sku_name = target_sku_name - self.migration_status: Optional[Union[str, "_models.MigrationStatus"]] = None - self.migration_failed_reason: Optional[str] = None - self.migration_failed_detailed_reason: Optional[str] = None - - -class StorageAccountRegenerateKeyParameters(_serialization.Model): - """The parameters used to regenerate the storage account key. - - All required parameters must be populated in order to send to server. - - :ivar key_name: The name of storage keys that want to be regenerated, possible values are key1, - key2, kerb1, kerb2. Required. - :vartype key_name: str - """ - - _validation = { - "key_name": {"required": True}, - } - - _attribute_map = { - "key_name": {"key": "keyName", "type": "str"}, - } - - def __init__(self, *, key_name: str, **kwargs: Any) -> None: - """ - :keyword key_name: The name of storage keys that want to be regenerated, possible values are - key1, key2, kerb1, kerb2. Required. - :paramtype key_name: str - """ - super().__init__(**kwargs) - self.key_name = key_name - - -class StorageAccountSkuConversionStatus(_serialization.Model): - """This defines the sku conversion status object for asynchronous sku conversions. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar sku_conversion_status: This property indicates the current sku conversion status. Known - values are: "InProgress", "Succeeded", and "Failed". - :vartype sku_conversion_status: str or ~azure.mgmt.storage.models.SkuConversionStatus - :ivar target_sku_name: This property represents the target sku name to which the account sku is - being converted asynchronously. 
Known values are: "Standard_LRS", "Standard_GRS", - "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", "Standard_GZRS", - "Standard_RAGZRS", "StandardV2_LRS", "StandardV2_GRS", "StandardV2_ZRS", "StandardV2_GZRS", - "PremiumV2_LRS", and "PremiumV2_ZRS". - :vartype target_sku_name: str or ~azure.mgmt.storage.models.SkuName - :ivar start_time: This property represents the sku conversion start time. - :vartype start_time: str - :ivar end_time: This property represents the sku conversion end time. - :vartype end_time: str - """ - - _validation = { - "sku_conversion_status": {"readonly": True}, - "start_time": {"readonly": True}, - "end_time": {"readonly": True}, - } - - _attribute_map = { - "sku_conversion_status": {"key": "skuConversionStatus", "type": "str"}, - "target_sku_name": {"key": "targetSkuName", "type": "str"}, - "start_time": {"key": "startTime", "type": "str"}, - "end_time": {"key": "endTime", "type": "str"}, - } - - def __init__(self, *, target_sku_name: Optional[Union[str, "_models.SkuName"]] = None, **kwargs: Any) -> None: - """ - :keyword target_sku_name: This property represents the target sku name to which the account sku - is being converted asynchronously. Known values are: "Standard_LRS", "Standard_GRS", - "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", "Standard_GZRS", - "Standard_RAGZRS", "StandardV2_LRS", "StandardV2_GRS", "StandardV2_ZRS", "StandardV2_GZRS", - "PremiumV2_LRS", and "PremiumV2_ZRS". - :paramtype target_sku_name: str or ~azure.mgmt.storage.models.SkuName - """ - super().__init__(**kwargs) - self.sku_conversion_status: Optional[Union[str, "_models.SkuConversionStatus"]] = None - self.target_sku_name = target_sku_name - self.start_time: Optional[str] = None - self.end_time: Optional[str] = None - - -class StorageAccountUpdateParameters(_serialization.Model): - """The parameters that can be provided when updating the storage account properties. - - :ivar sku: Gets or sets the SKU name. 
Note that the SKU name cannot be updated to Standard_ZRS, - Premium_LRS or Premium_ZRS, nor can accounts of those SKU names be updated to any other value. - :vartype sku: ~azure.mgmt.storage.models.Sku - :ivar tags: Gets or sets a list of key value pairs that describe the resource. These tags can - be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags - can be provided for a resource. Each tag must have a key no greater in length than 128 - characters and a value no greater in length than 256 characters. - :vartype tags: dict[str, str] - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.storage.models.Identity - :ivar kind: Optional. Indicates the type of storage account. Currently only StorageV2 value - supported by server. Known values are: "Storage", "StorageV2", "BlobStorage", "FileStorage", - and "BlockBlobStorage". - :vartype kind: str or ~azure.mgmt.storage.models.Kind - :ivar zones: Optional. Gets or sets the pinned logical availability zone for the storage - account. - :vartype zones: list[str] - :ivar placement: Optional. Gets or sets the zonal placement details for the storage account. - :vartype placement: ~azure.mgmt.storage.models.Placement - :ivar custom_domain: Custom domain assigned to the storage account by the user. Name is the - CNAME source. Only one custom domain is supported per storage account at this time. To clear - the existing custom domain, use an empty string for the custom domain name property. - :vartype custom_domain: ~azure.mgmt.storage.models.CustomDomain - :ivar encryption: Not applicable. Azure Storage encryption at rest is enabled by default for - all storage accounts and cannot be disabled. - :vartype encryption: ~azure.mgmt.storage.models.Encryption - :ivar sas_policy: SasPolicy assigned to the storage account. - :vartype sas_policy: ~azure.mgmt.storage.models.SasPolicy - :ivar key_policy: KeyPolicy assigned to the storage account. 
- :vartype key_policy: ~azure.mgmt.storage.models.KeyPolicy - :ivar access_tier: Required for storage accounts where kind = BlobStorage. The access tier is - used for billing. The 'Premium' access tier is the default value for premium block blobs - storage account type and it cannot be changed for the premium block blobs storage account type. - Known values are: "Hot", "Cool", "Premium", and "Cold". - :vartype access_tier: str or ~azure.mgmt.storage.models.AccessTier - :ivar azure_files_identity_based_authentication: Provides the identity based authentication - settings for Azure Files. - :vartype azure_files_identity_based_authentication: - ~azure.mgmt.storage.models.AzureFilesIdentityBasedAuthentication - :ivar enable_https_traffic_only: Allows https traffic only to storage service if sets to true. - :vartype enable_https_traffic_only: bool - :ivar is_sftp_enabled: Enables Secure File Transfer Protocol, if set to true. - :vartype is_sftp_enabled: bool - :ivar is_local_user_enabled: Enables local users feature, if set to true. - :vartype is_local_user_enabled: bool - :ivar enable_extended_groups: Enables extended group support with local users feature, if set - to true. - :vartype enable_extended_groups: bool - :ivar network_rule_set: Network rule set. - :vartype network_rule_set: ~azure.mgmt.storage.models.NetworkRuleSet - :ivar large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be - disabled once it is enabled. Known values are: "Disabled" and "Enabled". - :vartype large_file_shares_state: str or ~azure.mgmt.storage.models.LargeFileSharesState - :ivar routing_preference: Maintains information about the network routing choice opted by the - user for data transfer. - :vartype routing_preference: ~azure.mgmt.storage.models.RoutingPreference - :ivar dual_stack_endpoint_preference: Maintains information about the Internet protocol opted - by the user. 
- :vartype dual_stack_endpoint_preference: ~azure.mgmt.storage.models.DualStackEndpointPreference - :ivar allow_blob_public_access: Allow or disallow public access to all blobs or containers in - the storage account. The default interpretation is false for this property. - :vartype allow_blob_public_access: bool - :ivar minimum_tls_version: Set the minimum TLS version to be permitted on requests to storage. - The default interpretation is TLS 1.0 for this property. Known values are: "TLS1_0", "TLS1_1", - "TLS1_2", and "TLS1_3". - :vartype minimum_tls_version: str or ~azure.mgmt.storage.models.MinimumTlsVersion - :ivar allow_shared_key_access: Indicates whether the storage account permits requests to be - authorized with the account access key via Shared Key. If false, then all requests, including - shared access signatures, must be authorized with Azure Active Directory (Azure AD). The - default value is null, which is equivalent to true. - :vartype allow_shared_key_access: bool - :ivar allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. - Set this property to true for new or existing accounts only if object replication policies will - involve storage accounts in different AAD tenants. The default interpretation is false for new - accounts to follow best security practices by default. - :vartype allow_cross_tenant_replication: bool - :ivar default_to_o_auth_authentication: A boolean flag which indicates whether the default - authentication is OAuth or not. The default interpretation is false for this property. - :vartype default_to_o_auth_authentication: bool - :ivar public_network_access: Allow, disallow, or let Network Security Perimeter configuration - to evaluate public network access to Storage Account. Value is optional but if passed in, must - be 'Enabled', 'Disabled' or 'SecuredByPerimeter'. Known values are: "Enabled", "Disabled", and - "SecuredByPerimeter". 
- :vartype public_network_access: str or ~azure.mgmt.storage.models.PublicNetworkAccess - :ivar immutable_storage_with_versioning: The property is immutable and can only be set to true - at the account creation time. When set to true, it enables object level immutability for all - the containers in the account by default. - :vartype immutable_storage_with_versioning: ~azure.mgmt.storage.models.ImmutableStorageAccount - :ivar allowed_copy_scope: Restrict copy to and from Storage Accounts within an AAD tenant or - with Private Links to the same VNet. Known values are: "PrivateLink" and "AAD". - :vartype allowed_copy_scope: str or ~azure.mgmt.storage.models.AllowedCopyScope - :ivar dns_endpoint_type: Allows you to specify the type of endpoint. Set this to AzureDNSZone - to create a large number of accounts in a single subscription, which creates accounts in an - Azure DNS Zone and the endpoint URL will have an alphanumeric DNS Zone identifier. Known values - are: "Standard" and "AzureDnsZone". - :vartype dns_endpoint_type: str or ~azure.mgmt.storage.models.DnsEndpointType - :ivar geo_priority_replication_status: Status indicating whether Geo Priority Replication is - enabled for the account. 
- :vartype geo_priority_replication_status: - ~azure.mgmt.storage.models.GeoPriorityReplicationStatus - """ - - _attribute_map = { - "sku": {"key": "sku", "type": "Sku"}, - "tags": {"key": "tags", "type": "{str}"}, - "identity": {"key": "identity", "type": "Identity"}, - "kind": {"key": "kind", "type": "str"}, - "zones": {"key": "zones", "type": "[str]"}, - "placement": {"key": "placement", "type": "Placement"}, - "custom_domain": {"key": "properties.customDomain", "type": "CustomDomain"}, - "encryption": {"key": "properties.encryption", "type": "Encryption"}, - "sas_policy": {"key": "properties.sasPolicy", "type": "SasPolicy"}, - "key_policy": {"key": "properties.keyPolicy", "type": "KeyPolicy"}, - "access_tier": {"key": "properties.accessTier", "type": "str"}, - "azure_files_identity_based_authentication": { - "key": "properties.azureFilesIdentityBasedAuthentication", - "type": "AzureFilesIdentityBasedAuthentication", - }, - "enable_https_traffic_only": {"key": "properties.supportsHttpsTrafficOnly", "type": "bool"}, - "is_sftp_enabled": {"key": "properties.isSftpEnabled", "type": "bool"}, - "is_local_user_enabled": {"key": "properties.isLocalUserEnabled", "type": "bool"}, - "enable_extended_groups": {"key": "properties.enableExtendedGroups", "type": "bool"}, - "network_rule_set": {"key": "properties.networkAcls", "type": "NetworkRuleSet"}, - "large_file_shares_state": {"key": "properties.largeFileSharesState", "type": "str"}, - "routing_preference": {"key": "properties.routingPreference", "type": "RoutingPreference"}, - "dual_stack_endpoint_preference": { - "key": "properties.dualStackEndpointPreference", - "type": "DualStackEndpointPreference", - }, - "allow_blob_public_access": {"key": "properties.allowBlobPublicAccess", "type": "bool"}, - "minimum_tls_version": {"key": "properties.minimumTlsVersion", "type": "str"}, - "allow_shared_key_access": {"key": "properties.allowSharedKeyAccess", "type": "bool"}, - "allow_cross_tenant_replication": {"key": 
"properties.allowCrossTenantReplication", "type": "bool"}, - "default_to_o_auth_authentication": {"key": "properties.defaultToOAuthAuthentication", "type": "bool"}, - "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, - "immutable_storage_with_versioning": { - "key": "properties.immutableStorageWithVersioning", - "type": "ImmutableStorageAccount", - }, - "allowed_copy_scope": {"key": "properties.allowedCopyScope", "type": "str"}, - "dns_endpoint_type": {"key": "properties.dnsEndpointType", "type": "str"}, - "geo_priority_replication_status": { - "key": "properties.geoPriorityReplicationStatus", - "type": "GeoPriorityReplicationStatus", - }, - } - - def __init__( # pylint: disable=too-many-locals - self, - *, - sku: Optional["_models.Sku"] = None, - tags: Optional[dict[str, str]] = None, - identity: Optional["_models.Identity"] = None, - kind: Optional[Union[str, "_models.Kind"]] = None, - zones: Optional[list[str]] = None, - placement: Optional["_models.Placement"] = None, - custom_domain: Optional["_models.CustomDomain"] = None, - encryption: Optional["_models.Encryption"] = None, - sas_policy: Optional["_models.SasPolicy"] = None, - key_policy: Optional["_models.KeyPolicy"] = None, - access_tier: Optional[Union[str, "_models.AccessTier"]] = None, - azure_files_identity_based_authentication: Optional["_models.AzureFilesIdentityBasedAuthentication"] = None, - enable_https_traffic_only: Optional[bool] = None, - is_sftp_enabled: Optional[bool] = None, - is_local_user_enabled: Optional[bool] = None, - enable_extended_groups: Optional[bool] = None, - network_rule_set: Optional["_models.NetworkRuleSet"] = None, - large_file_shares_state: Optional[Union[str, "_models.LargeFileSharesState"]] = None, - routing_preference: Optional["_models.RoutingPreference"] = None, - dual_stack_endpoint_preference: Optional["_models.DualStackEndpointPreference"] = None, - allow_blob_public_access: Optional[bool] = None, - minimum_tls_version: 
Optional[Union[str, "_models.MinimumTlsVersion"]] = None, - allow_shared_key_access: Optional[bool] = None, - allow_cross_tenant_replication: Optional[bool] = None, - default_to_o_auth_authentication: Optional[bool] = None, - public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, - immutable_storage_with_versioning: Optional["_models.ImmutableStorageAccount"] = None, - allowed_copy_scope: Optional[Union[str, "_models.AllowedCopyScope"]] = None, - dns_endpoint_type: Optional[Union[str, "_models.DnsEndpointType"]] = None, - geo_priority_replication_status: Optional["_models.GeoPriorityReplicationStatus"] = None, - **kwargs: Any - ) -> None: - """ - :keyword sku: Gets or sets the SKU name. Note that the SKU name cannot be updated to - Standard_ZRS, Premium_LRS or Premium_ZRS, nor can accounts of those SKU names be updated to any - other value. - :paramtype sku: ~azure.mgmt.storage.models.Sku - :keyword tags: Gets or sets a list of key value pairs that describe the resource. These tags - can be used in viewing and grouping this resource (across resource groups). A maximum of 15 - tags can be provided for a resource. Each tag must have a key no greater in length than 128 - characters and a value no greater in length than 256 characters. - :paramtype tags: dict[str, str] - :keyword identity: The identity of the resource. - :paramtype identity: ~azure.mgmt.storage.models.Identity - :keyword kind: Optional. Indicates the type of storage account. Currently only StorageV2 value - supported by server. Known values are: "Storage", "StorageV2", "BlobStorage", "FileStorage", - and "BlockBlobStorage". - :paramtype kind: str or ~azure.mgmt.storage.models.Kind - :keyword zones: Optional. Gets or sets the pinned logical availability zone for the storage - account. - :paramtype zones: list[str] - :keyword placement: Optional. Gets or sets the zonal placement details for the storage account. 
- :paramtype placement: ~azure.mgmt.storage.models.Placement - :keyword custom_domain: Custom domain assigned to the storage account by the user. Name is the - CNAME source. Only one custom domain is supported per storage account at this time. To clear - the existing custom domain, use an empty string for the custom domain name property. - :paramtype custom_domain: ~azure.mgmt.storage.models.CustomDomain - :keyword encryption: Not applicable. Azure Storage encryption at rest is enabled by default for - all storage accounts and cannot be disabled. - :paramtype encryption: ~azure.mgmt.storage.models.Encryption - :keyword sas_policy: SasPolicy assigned to the storage account. - :paramtype sas_policy: ~azure.mgmt.storage.models.SasPolicy - :keyword key_policy: KeyPolicy assigned to the storage account. - :paramtype key_policy: ~azure.mgmt.storage.models.KeyPolicy - :keyword access_tier: Required for storage accounts where kind = BlobStorage. The access tier - is used for billing. The 'Premium' access tier is the default value for premium block blobs - storage account type and it cannot be changed for the premium block blobs storage account type. - Known values are: "Hot", "Cool", "Premium", and "Cold". - :paramtype access_tier: str or ~azure.mgmt.storage.models.AccessTier - :keyword azure_files_identity_based_authentication: Provides the identity based authentication - settings for Azure Files. - :paramtype azure_files_identity_based_authentication: - ~azure.mgmt.storage.models.AzureFilesIdentityBasedAuthentication - :keyword enable_https_traffic_only: Allows https traffic only to storage service if sets to - true. - :paramtype enable_https_traffic_only: bool - :keyword is_sftp_enabled: Enables Secure File Transfer Protocol, if set to true. - :paramtype is_sftp_enabled: bool - :keyword is_local_user_enabled: Enables local users feature, if set to true. 
- :paramtype is_local_user_enabled: bool - :keyword enable_extended_groups: Enables extended group support with local users feature, if - set to true. - :paramtype enable_extended_groups: bool - :keyword network_rule_set: Network rule set. - :paramtype network_rule_set: ~azure.mgmt.storage.models.NetworkRuleSet - :keyword large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be - disabled once it is enabled. Known values are: "Disabled" and "Enabled". - :paramtype large_file_shares_state: str or ~azure.mgmt.storage.models.LargeFileSharesState - :keyword routing_preference: Maintains information about the network routing choice opted by - the user for data transfer. - :paramtype routing_preference: ~azure.mgmt.storage.models.RoutingPreference - :keyword dual_stack_endpoint_preference: Maintains information about the Internet protocol - opted by the user. - :paramtype dual_stack_endpoint_preference: - ~azure.mgmt.storage.models.DualStackEndpointPreference - :keyword allow_blob_public_access: Allow or disallow public access to all blobs or containers - in the storage account. The default interpretation is false for this property. - :paramtype allow_blob_public_access: bool - :keyword minimum_tls_version: Set the minimum TLS version to be permitted on requests to - storage. The default interpretation is TLS 1.0 for this property. Known values are: "TLS1_0", - "TLS1_1", "TLS1_2", and "TLS1_3". - :paramtype minimum_tls_version: str or ~azure.mgmt.storage.models.MinimumTlsVersion - :keyword allow_shared_key_access: Indicates whether the storage account permits requests to be - authorized with the account access key via Shared Key. If false, then all requests, including - shared access signatures, must be authorized with Azure Active Directory (Azure AD). The - default value is null, which is equivalent to true. 
- :paramtype allow_shared_key_access: bool - :keyword allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. - Set this property to true for new or existing accounts only if object replication policies will - involve storage accounts in different AAD tenants. The default interpretation is false for new - accounts to follow best security practices by default. - :paramtype allow_cross_tenant_replication: bool - :keyword default_to_o_auth_authentication: A boolean flag which indicates whether the default - authentication is OAuth or not. The default interpretation is false for this property. - :paramtype default_to_o_auth_authentication: bool - :keyword public_network_access: Allow, disallow, or let Network Security Perimeter - configuration to evaluate public network access to Storage Account. Value is optional but if - passed in, must be 'Enabled', 'Disabled' or 'SecuredByPerimeter'. Known values are: "Enabled", - "Disabled", and "SecuredByPerimeter". - :paramtype public_network_access: str or ~azure.mgmt.storage.models.PublicNetworkAccess - :keyword immutable_storage_with_versioning: The property is immutable and can only be set to - true at the account creation time. When set to true, it enables object level immutability for - all the containers in the account by default. - :paramtype immutable_storage_with_versioning: - ~azure.mgmt.storage.models.ImmutableStorageAccount - :keyword allowed_copy_scope: Restrict copy to and from Storage Accounts within an AAD tenant or - with Private Links to the same VNet. Known values are: "PrivateLink" and "AAD". - :paramtype allowed_copy_scope: str or ~azure.mgmt.storage.models.AllowedCopyScope - :keyword dns_endpoint_type: Allows you to specify the type of endpoint. Set this to - AzureDNSZone to create a large number of accounts in a single subscription, which creates - accounts in an Azure DNS Zone and the endpoint URL will have an alphanumeric DNS Zone - identifier. 
Known values are: "Standard" and "AzureDnsZone". - :paramtype dns_endpoint_type: str or ~azure.mgmt.storage.models.DnsEndpointType - :keyword geo_priority_replication_status: Status indicating whether Geo Priority Replication is - enabled for the account. - :paramtype geo_priority_replication_status: - ~azure.mgmt.storage.models.GeoPriorityReplicationStatus - """ - super().__init__(**kwargs) - self.sku = sku - self.tags = tags - self.identity = identity - self.kind = kind - self.zones = zones - self.placement = placement - self.custom_domain = custom_domain - self.encryption = encryption - self.sas_policy = sas_policy - self.key_policy = key_policy - self.access_tier = access_tier - self.azure_files_identity_based_authentication = azure_files_identity_based_authentication - self.enable_https_traffic_only = enable_https_traffic_only - self.is_sftp_enabled = is_sftp_enabled - self.is_local_user_enabled = is_local_user_enabled - self.enable_extended_groups = enable_extended_groups - self.network_rule_set = network_rule_set - self.large_file_shares_state = large_file_shares_state - self.routing_preference = routing_preference - self.dual_stack_endpoint_preference = dual_stack_endpoint_preference - self.allow_blob_public_access = allow_blob_public_access - self.minimum_tls_version = minimum_tls_version - self.allow_shared_key_access = allow_shared_key_access - self.allow_cross_tenant_replication = allow_cross_tenant_replication - self.default_to_o_auth_authentication = default_to_o_auth_authentication - self.public_network_access = public_network_access - self.immutable_storage_with_versioning = immutable_storage_with_versioning - self.allowed_copy_scope = allowed_copy_scope - self.dns_endpoint_type = dns_endpoint_type - self.geo_priority_replication_status = geo_priority_replication_status - - -class StorageQueue(Resource): - """StorageQueue. - - Variables are only populated by the server, and will be ignored when sending a request. 
- - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar metadata: A name-value pair that represents queue metadata. - :vartype metadata: dict[str, str] - :ivar approximate_message_count: Integer indicating an approximate number of messages in the - queue. This number is not lower than the actual number of messages in the queue, but could be - higher. - :vartype approximate_message_count: int - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "approximate_message_count": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "metadata": {"key": "properties.metadata", "type": "{str}"}, - "approximate_message_count": {"key": "properties.approximateMessageCount", "type": "int"}, - } - - def __init__(self, *, metadata: Optional[dict[str, str]] = None, **kwargs: Any) -> None: - """ - :keyword metadata: A name-value pair that represents queue metadata. - :paramtype metadata: dict[str, str] - """ - super().__init__(**kwargs) - self.metadata = metadata - self.approximate_message_count: Optional[int] = None - - -class StorageSkuListResult(_serialization.Model): - """The response from the List Storage SKUs operation. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: Get the list result of storage SKUs and their properties. 
- :vartype value: list[~azure.mgmt.storage.models.SkuInformation] - """ - - _validation = { - "value": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[SkuInformation]"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.SkuInformation"]] = None - - -class StorageTaskAssignment(Resource): - """The storage task assignment. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar properties: Properties of the storage task assignment. Required. - :vartype properties: ~azure.mgmt.storage.models.StorageTaskAssignmentProperties - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "properties": {"required": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "properties": {"key": "properties", "type": "StorageTaskAssignmentProperties"}, - } - - def __init__(self, *, properties: "_models.StorageTaskAssignmentProperties", **kwargs: Any) -> None: - """ - :keyword properties: Properties of the storage task assignment. Required. 
- :paramtype properties: ~azure.mgmt.storage.models.StorageTaskAssignmentProperties - """ - super().__init__(**kwargs) - self.properties = properties - - -class StorageTaskAssignmentExecutionContext(_serialization.Model): - """Execution context of the storage task assignment. - - All required parameters must be populated in order to send to server. - - :ivar target: Execution target of the storage task assignment. - :vartype target: ~azure.mgmt.storage.models.ExecutionTarget - :ivar trigger: Execution trigger of the storage task assignment. Required. - :vartype trigger: ~azure.mgmt.storage.models.ExecutionTrigger - """ - - _validation = { - "trigger": {"required": True}, - } - - _attribute_map = { - "target": {"key": "target", "type": "ExecutionTarget"}, - "trigger": {"key": "trigger", "type": "ExecutionTrigger"}, - } - - def __init__( - self, *, trigger: "_models.ExecutionTrigger", target: Optional["_models.ExecutionTarget"] = None, **kwargs: Any - ) -> None: - """ - :keyword target: Execution target of the storage task assignment. - :paramtype target: ~azure.mgmt.storage.models.ExecutionTarget - :keyword trigger: Execution trigger of the storage task assignment. Required. - :paramtype trigger: ~azure.mgmt.storage.models.ExecutionTrigger - """ - super().__init__(**kwargs) - self.target = target - self.trigger = trigger - - -class StorageTaskAssignmentProperties(_serialization.Model): - """Properties of the storage task assignment. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar task_id: Id of the corresponding storage task. Required. - :vartype task_id: str - :ivar enabled: Whether the storage task assignment is enabled or not. Required. - :vartype enabled: bool - :ivar description: Text that describes the purpose of the storage task assignment. Required. 
- :vartype description: str - :ivar execution_context: The storage task assignment execution context. Required. - :vartype execution_context: ~azure.mgmt.storage.models.StorageTaskAssignmentExecutionContext - :ivar report: The storage task assignment report. Required. - :vartype report: ~azure.mgmt.storage.models.StorageTaskAssignmentReport - :ivar provisioning_state: Represents the provisioning state of the storage task assignment. - Known values are: "Creating", "ResolvingDNS", "Succeeded", "ValidateSubscriptionQuotaBegin", - "ValidateSubscriptionQuotaEnd", "Accepted", "Deleting", "Canceled", and "Failed". - :vartype provisioning_state: str or ~azure.mgmt.storage.models.ProvisioningState - :ivar run_status: Run status of storage task assignment. - :vartype run_status: ~azure.mgmt.storage.models.StorageTaskReportProperties - """ - - _validation = { - "task_id": {"required": True}, - "enabled": {"required": True}, - "description": {"required": True}, - "execution_context": {"required": True}, - "report": {"required": True}, - "provisioning_state": {"readonly": True}, - } - - _attribute_map = { - "task_id": {"key": "taskId", "type": "str"}, - "enabled": {"key": "enabled", "type": "bool"}, - "description": {"key": "description", "type": "str"}, - "execution_context": {"key": "executionContext", "type": "StorageTaskAssignmentExecutionContext"}, - "report": {"key": "report", "type": "StorageTaskAssignmentReport"}, - "provisioning_state": {"key": "provisioningState", "type": "str"}, - "run_status": {"key": "runStatus", "type": "StorageTaskReportProperties"}, - } - - def __init__( - self, - *, - task_id: str, - enabled: bool, - description: str, - execution_context: "_models.StorageTaskAssignmentExecutionContext", - report: "_models.StorageTaskAssignmentReport", - run_status: Optional["_models.StorageTaskReportProperties"] = None, - **kwargs: Any - ) -> None: - """ - :keyword task_id: Id of the corresponding storage task. Required. 
- :paramtype task_id: str - :keyword enabled: Whether the storage task assignment is enabled or not. Required. - :paramtype enabled: bool - :keyword description: Text that describes the purpose of the storage task assignment. Required. - :paramtype description: str - :keyword execution_context: The storage task assignment execution context. Required. - :paramtype execution_context: ~azure.mgmt.storage.models.StorageTaskAssignmentExecutionContext - :keyword report: The storage task assignment report. Required. - :paramtype report: ~azure.mgmt.storage.models.StorageTaskAssignmentReport - :keyword run_status: Run status of storage task assignment. - :paramtype run_status: ~azure.mgmt.storage.models.StorageTaskReportProperties - """ - super().__init__(**kwargs) - self.task_id = task_id - self.enabled = enabled - self.description = description - self.execution_context = execution_context - self.report = report - self.provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None - self.run_status = run_status - - -class StorageTaskAssignmentReport(_serialization.Model): - """The storage task assignment report. - - All required parameters must be populated in order to send to server. - - :ivar prefix: The container prefix for the location of storage task assignment report. - Required. - :vartype prefix: str - """ - - _validation = { - "prefix": {"required": True}, - } - - _attribute_map = { - "prefix": {"key": "prefix", "type": "str"}, - } - - def __init__(self, *, prefix: str, **kwargs: Any) -> None: - """ - :keyword prefix: The container prefix for the location of storage task assignment report. - Required. - :paramtype prefix: str - """ - super().__init__(**kwargs) - self.prefix = prefix - - -class StorageTaskAssignmentsList(_serialization.Model): - """List of storage task assignments for the storage account. - - Variables are only populated by the server, and will be ignored when sending a request. 
- - :ivar value: Gets the list of storage task assignments and their properties. - :vartype value: list[~azure.mgmt.storage.models.StorageTaskAssignment] - :ivar next_link: Request URL that can be used to query next page of storage task assignments. - Returned when total number of requested storage task assignments exceed maximum page size. - :vartype next_link: str - """ - - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[StorageTaskAssignment]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.StorageTaskAssignment"]] = None - self.next_link: Optional[str] = None - - -class StorageTaskAssignmentUpdateExecutionContext(_serialization.Model): # pylint: disable=name-too-long - """Execution context of the storage task assignment update. - - :ivar target: Execution target of the storage task assignment. - :vartype target: ~azure.mgmt.storage.models.ExecutionTarget - :ivar trigger: Execution trigger of the storage task assignment. - :vartype trigger: ~azure.mgmt.storage.models.ExecutionTriggerUpdate - """ - - _attribute_map = { - "target": {"key": "target", "type": "ExecutionTarget"}, - "trigger": {"key": "trigger", "type": "ExecutionTriggerUpdate"}, - } - - def __init__( - self, - *, - target: Optional["_models.ExecutionTarget"] = None, - trigger: Optional["_models.ExecutionTriggerUpdate"] = None, - **kwargs: Any - ) -> None: - """ - :keyword target: Execution target of the storage task assignment. - :paramtype target: ~azure.mgmt.storage.models.ExecutionTarget - :keyword trigger: Execution trigger of the storage task assignment. 
- :paramtype trigger: ~azure.mgmt.storage.models.ExecutionTriggerUpdate - """ - super().__init__(**kwargs) - self.target = target - self.trigger = trigger - - -class StorageTaskAssignmentUpdateParameters(_serialization.Model): - """Parameters of the storage task assignment update request. - - :ivar properties: Properties of the storage task assignment. - :vartype properties: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateProperties - """ - - _attribute_map = { - "properties": {"key": "properties", "type": "StorageTaskAssignmentUpdateProperties"}, - } - - def __init__( - self, *, properties: Optional["_models.StorageTaskAssignmentUpdateProperties"] = None, **kwargs: Any - ) -> None: - """ - :keyword properties: Properties of the storage task assignment. - :paramtype properties: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateProperties - """ - super().__init__(**kwargs) - self.properties = properties - - -class StorageTaskAssignmentUpdateProperties(_serialization.Model): - """Properties of the storage task update assignment. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar task_id: Id of the corresponding storage task. - :vartype task_id: str - :ivar enabled: Whether the storage task assignment is enabled or not. - :vartype enabled: bool - :ivar description: Text that describes the purpose of the storage task assignment. - :vartype description: str - :ivar execution_context: The storage task assignment execution context. - :vartype execution_context: - ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateExecutionContext - :ivar report: The storage task assignment report. - :vartype report: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateReport - :ivar provisioning_state: Represents the provisioning state of the storage task assignment. 
- Known values are: "Creating", "ResolvingDNS", "Succeeded", "ValidateSubscriptionQuotaBegin", - "ValidateSubscriptionQuotaEnd", "Accepted", "Deleting", "Canceled", and "Failed". - :vartype provisioning_state: str or ~azure.mgmt.storage.models.ProvisioningState - :ivar run_status: Run status of storage task assignment. - :vartype run_status: ~azure.mgmt.storage.models.StorageTaskReportProperties - """ - - _validation = { - "task_id": {"readonly": True}, - "provisioning_state": {"readonly": True}, - } - - _attribute_map = { - "task_id": {"key": "taskId", "type": "str"}, - "enabled": {"key": "enabled", "type": "bool"}, - "description": {"key": "description", "type": "str"}, - "execution_context": {"key": "executionContext", "type": "StorageTaskAssignmentUpdateExecutionContext"}, - "report": {"key": "report", "type": "StorageTaskAssignmentUpdateReport"}, - "provisioning_state": {"key": "provisioningState", "type": "str"}, - "run_status": {"key": "runStatus", "type": "StorageTaskReportProperties"}, - } - - def __init__( - self, - *, - enabled: Optional[bool] = None, - description: Optional[str] = None, - execution_context: Optional["_models.StorageTaskAssignmentUpdateExecutionContext"] = None, - report: Optional["_models.StorageTaskAssignmentUpdateReport"] = None, - run_status: Optional["_models.StorageTaskReportProperties"] = None, - **kwargs: Any - ) -> None: - """ - :keyword enabled: Whether the storage task assignment is enabled or not. - :paramtype enabled: bool - :keyword description: Text that describes the purpose of the storage task assignment. - :paramtype description: str - :keyword execution_context: The storage task assignment execution context. - :paramtype execution_context: - ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateExecutionContext - :keyword report: The storage task assignment report. - :paramtype report: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateReport - :keyword run_status: Run status of storage task assignment. 
- :paramtype run_status: ~azure.mgmt.storage.models.StorageTaskReportProperties - """ - super().__init__(**kwargs) - self.task_id: Optional[str] = None - self.enabled = enabled - self.description = description - self.execution_context = execution_context - self.report = report - self.provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None - self.run_status = run_status - - -class StorageTaskAssignmentUpdateReport(_serialization.Model): - """The storage task assignment report. - - :ivar prefix: The prefix of the storage task assignment report. - :vartype prefix: str - """ - - _attribute_map = { - "prefix": {"key": "prefix", "type": "str"}, - } - - def __init__(self, *, prefix: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword prefix: The prefix of the storage task assignment report. - :paramtype prefix: str - """ - super().__init__(**kwargs) - self.prefix = prefix - - -class StorageTaskReportInstance(ProxyResource): - """Storage Tasks run report instance. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar properties: Storage task execution report for a run instance. 
- :vartype properties: ~azure.mgmt.storage.models.StorageTaskReportProperties - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "properties": {"key": "properties", "type": "StorageTaskReportProperties"}, - } - - def __init__(self, *, properties: Optional["_models.StorageTaskReportProperties"] = None, **kwargs: Any) -> None: - """ - :keyword properties: Storage task execution report for a run instance. - :paramtype properties: ~azure.mgmt.storage.models.StorageTaskReportProperties - """ - super().__init__(**kwargs) - self.properties = properties - - -class StorageTaskReportProperties(_serialization.Model): - """Storage task execution report for a run instance. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar task_assignment_id: Represents the Storage Task Assignment Id associated with the storage - task that provided an execution context. - :vartype task_assignment_id: str - :ivar storage_account_id: Represents the Storage Account Id where the storage task definition - was applied and executed. - :vartype storage_account_id: str - :ivar start_time: Start time of the run instance. Filter options such as startTime gt - '2023-06-26T20:51:24.4494016Z' and other comparison operators can be used as described for - DateTime properties in - https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators. - :vartype start_time: str - :ivar finish_time: End time of the run instance. Filter options such as startTime gt - '2023-06-26T20:51:24.4494016Z' and other comparison operators can be used as described for - DateTime properties in - https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators. 
- :vartype finish_time: str - :ivar objects_targeted_count: Total number of objects that meet the condition as defined in the - storage task assignment execution context. Filter options such as objectsTargetedCount gt 50 - and other comparison operators can be used as described for Numerical properties in - https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators. - :vartype objects_targeted_count: str - :ivar objects_operated_on_count: Total number of objects that meet the storage tasks condition - and were operated upon. Filter options such as objectsOperatedOnCount ge 100 and other - comparison operators can be used as described for Numerical properties in - https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators. - :vartype objects_operated_on_count: str - :ivar object_failed_count: Total number of objects where task operation failed when was - attempted. Filter options such as objectFailedCount eq 0 and other comparison operators can be - used as described for Numerical properties in - https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators. - :vartype object_failed_count: str - :ivar objects_succeeded_count: Total number of objects where task operation succeeded when was - attempted.Filter options such as objectsSucceededCount gt 150 and other comparison operators - can be used as described for Numerical properties in - https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#supported-comparison-operators. - :vartype objects_succeeded_count: str - :ivar run_status_error: Well known Azure Storage error code that represents the error - encountered during execution of the run instance. - :vartype run_status_error: str - :ivar run_status_enum: Represents the status of the execution. Known values are: "InProgress" - and "Finished". 
- :vartype run_status_enum: str or ~azure.mgmt.storage.models.RunStatusEnum - :ivar summary_report_path: Full path to the verbose report stored in the reporting container as - specified in the assignment execution context for the storage account. - :vartype summary_report_path: str - :ivar task_id: Storage Task Arm Id. - :vartype task_id: str - :ivar task_version: Storage Task Version. - :vartype task_version: str - :ivar run_result: Represents the overall result of the execution for the run instance. Known - values are: "Succeeded" and "Failed". - :vartype run_result: str or ~azure.mgmt.storage.models.RunResult - """ - - _validation = { - "task_assignment_id": {"readonly": True}, - "storage_account_id": {"readonly": True}, - "start_time": {"readonly": True}, - "finish_time": {"readonly": True}, - "objects_targeted_count": {"readonly": True}, - "objects_operated_on_count": {"readonly": True}, - "object_failed_count": {"readonly": True}, - "objects_succeeded_count": {"readonly": True}, - "run_status_error": {"readonly": True}, - "run_status_enum": {"readonly": True}, - "summary_report_path": {"readonly": True}, - "task_id": {"readonly": True}, - "task_version": {"readonly": True}, - "run_result": {"readonly": True}, - } - - _attribute_map = { - "task_assignment_id": {"key": "taskAssignmentId", "type": "str"}, - "storage_account_id": {"key": "storageAccountId", "type": "str"}, - "start_time": {"key": "startTime", "type": "str"}, - "finish_time": {"key": "finishTime", "type": "str"}, - "objects_targeted_count": {"key": "objectsTargetedCount", "type": "str"}, - "objects_operated_on_count": {"key": "objectsOperatedOnCount", "type": "str"}, - "object_failed_count": {"key": "objectFailedCount", "type": "str"}, - "objects_succeeded_count": {"key": "objectsSucceededCount", "type": "str"}, - "run_status_error": {"key": "runStatusError", "type": "str"}, - "run_status_enum": {"key": "runStatusEnum", "type": "str"}, - "summary_report_path": {"key": "summaryReportPath", "type": 
"str"}, - "task_id": {"key": "taskId", "type": "str"}, - "task_version": {"key": "taskVersion", "type": "str"}, - "run_result": {"key": "runResult", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.task_assignment_id: Optional[str] = None - self.storage_account_id: Optional[str] = None - self.start_time: Optional[str] = None - self.finish_time: Optional[str] = None - self.objects_targeted_count: Optional[str] = None - self.objects_operated_on_count: Optional[str] = None - self.object_failed_count: Optional[str] = None - self.objects_succeeded_count: Optional[str] = None - self.run_status_error: Optional[str] = None - self.run_status_enum: Optional[Union[str, "_models.RunStatusEnum"]] = None - self.summary_report_path: Optional[str] = None - self.task_id: Optional[str] = None - self.task_version: Optional[str] = None - self.run_result: Optional[Union[str, "_models.RunResult"]] = None - - -class StorageTaskReportSummary(_serialization.Model): - """Fetch Storage Tasks Run Summary. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: Gets storage tasks run result summary. - :vartype value: list[~azure.mgmt.storage.models.StorageTaskReportInstance] - :ivar next_link: Request URL that can be used to query next page of storage task run results - summary. Returned when the number of run instances and summary reports exceed maximum page - size. 
- :vartype next_link: str - """ - - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[StorageTaskReportInstance]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[list["_models.StorageTaskReportInstance"]] = None - self.next_link: Optional[str] = None - - -class SystemData(_serialization.Model): - """Metadata pertaining to creation and last modification of the resource. - - :ivar created_by: The identity that created the resource. - :vartype created_by: str - :ivar created_by_type: The type of identity that created the resource. Known values are: - "User", "Application", "ManagedIdentity", and "Key". - :vartype created_by_type: str or ~azure.mgmt.storage.models.CreatedByType - :ivar created_at: The timestamp of resource creation (UTC). - :vartype created_at: ~datetime.datetime - :ivar last_modified_by: The identity that last modified the resource. - :vartype last_modified_by: str - :ivar last_modified_by_type: The type of identity that last modified the resource. Known values - are: "User", "Application", "ManagedIdentity", and "Key". - :vartype last_modified_by_type: str or ~azure.mgmt.storage.models.CreatedByType - :ivar last_modified_at: The timestamp of resource last modification (UTC). 
- :vartype last_modified_at: ~datetime.datetime - """ - - _attribute_map = { - "created_by": {"key": "createdBy", "type": "str"}, - "created_by_type": {"key": "createdByType", "type": "str"}, - "created_at": {"key": "createdAt", "type": "iso-8601"}, - "last_modified_by": {"key": "lastModifiedBy", "type": "str"}, - "last_modified_by_type": {"key": "lastModifiedByType", "type": "str"}, - "last_modified_at": {"key": "lastModifiedAt", "type": "iso-8601"}, - } - - def __init__( - self, - *, - created_by: Optional[str] = None, - created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, - created_at: Optional[datetime.datetime] = None, - last_modified_by: Optional[str] = None, - last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, - last_modified_at: Optional[datetime.datetime] = None, - **kwargs: Any - ) -> None: - """ - :keyword created_by: The identity that created the resource. - :paramtype created_by: str - :keyword created_by_type: The type of identity that created the resource. Known values are: - "User", "Application", "ManagedIdentity", and "Key". - :paramtype created_by_type: str or ~azure.mgmt.storage.models.CreatedByType - :keyword created_at: The timestamp of resource creation (UTC). - :paramtype created_at: ~datetime.datetime - :keyword last_modified_by: The identity that last modified the resource. - :paramtype last_modified_by: str - :keyword last_modified_by_type: The type of identity that last modified the resource. Known - values are: "User", "Application", "ManagedIdentity", and "Key". - :paramtype last_modified_by_type: str or ~azure.mgmt.storage.models.CreatedByType - :keyword last_modified_at: The timestamp of resource last modification (UTC). 
- :paramtype last_modified_at: ~datetime.datetime - """ - super().__init__(**kwargs) - self.created_by = created_by - self.created_by_type = created_by_type - self.created_at = created_at - self.last_modified_by = last_modified_by - self.last_modified_by_type = last_modified_by_type - self.last_modified_at = last_modified_at - - -class Table(Resource): - """Properties of the table, including Id, resource name, resource type. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar table_name: Table name under the specified account. - :vartype table_name: str - :ivar signed_identifiers: List of stored access policies specified on the table. - :vartype signed_identifiers: list[~azure.mgmt.storage.models.TableSignedIdentifier] - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "table_name": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "table_name": {"key": "properties.tableName", "type": "str"}, - "signed_identifiers": {"key": "properties.signedIdentifiers", "type": "[TableSignedIdentifier]"}, - } - - def __init__( - self, *, signed_identifiers: Optional[list["_models.TableSignedIdentifier"]] = None, **kwargs: Any - ) -> None: - """ - :keyword signed_identifiers: List of stored access policies specified on the table. 
- :paramtype signed_identifiers: list[~azure.mgmt.storage.models.TableSignedIdentifier] - """ - super().__init__(**kwargs) - self.table_name: Optional[str] = None - self.signed_identifiers = signed_identifiers - - -class TableAccessPolicy(_serialization.Model): - """Table Access Policy Properties Object. - - All required parameters must be populated in order to send to server. - - :ivar start_time: Start time of the access policy. - :vartype start_time: ~datetime.datetime - :ivar expiry_time: Expiry time of the access policy. - :vartype expiry_time: ~datetime.datetime - :ivar permission: Required. List of abbreviated permissions. Supported permission values - include 'r','a','u','d'. Required. - :vartype permission: str - """ - - _validation = { - "permission": {"required": True}, - } - - _attribute_map = { - "start_time": {"key": "startTime", "type": "iso-8601"}, - "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, - "permission": {"key": "permission", "type": "str"}, - } - - def __init__( - self, - *, - permission: str, - start_time: Optional[datetime.datetime] = None, - expiry_time: Optional[datetime.datetime] = None, - **kwargs: Any - ) -> None: - """ - :keyword start_time: Start time of the access policy. - :paramtype start_time: ~datetime.datetime - :keyword expiry_time: Expiry time of the access policy. - :paramtype expiry_time: ~datetime.datetime - :keyword permission: Required. List of abbreviated permissions. Supported permission values - include 'r','a','u','d'. Required. - :paramtype permission: str - """ - super().__init__(**kwargs) - self.start_time = start_time - self.expiry_time = expiry_time - self.permission = permission - - -class TableServiceProperties(Resource): - """The properties of a storage account’s Table service. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. 
Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar cors: Specifies CORS rules for the Table service. You can include up to five CorsRule - elements in the request. If no CorsRule elements are included in the request body, all CORS - rules will be deleted, and CORS will be disabled for the Table service. - :vartype cors: ~azure.mgmt.storage.models.CorsRules - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "cors": {"key": "properties.cors", "type": "CorsRules"}, - } - - def __init__(self, *, cors: Optional["_models.CorsRules"] = None, **kwargs: Any) -> None: - """ - :keyword cors: Specifies CORS rules for the Table service. You can include up to five CorsRule - elements in the request. If no CorsRule elements are included in the request body, all CORS - rules will be deleted, and CORS will be disabled for the Table service. - :paramtype cors: ~azure.mgmt.storage.models.CorsRules - """ - super().__init__(**kwargs) - self.cors = cors - - -class TableSignedIdentifier(_serialization.Model): - """Object to set Table Access Policy. - - All required parameters must be populated in order to send to server. - - :ivar id: unique-64-character-value of the stored access policy. Required. - :vartype id: str - :ivar access_policy: Access policy. 
- :vartype access_policy: ~azure.mgmt.storage.models.TableAccessPolicy - """ - - _validation = { - "id": {"required": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "access_policy": {"key": "accessPolicy", "type": "TableAccessPolicy"}, - } - - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - access_policy: Optional["_models.TableAccessPolicy"] = None, - **kwargs: Any - ) -> None: - """ - :keyword id: unique-64-character-value of the stored access policy. Required. - :paramtype id: str - :keyword access_policy: Access policy. - :paramtype access_policy: ~azure.mgmt.storage.models.TableAccessPolicy - """ - super().__init__(**kwargs) - self.id = id - self.access_policy = access_policy - - -class TagFilter(_serialization.Model): - """Blob index tag based filtering for blob objects. - - All required parameters must be populated in order to send to server. - - :ivar name: This is the filter tag name, it can have 1 - 128 characters. Required. - :vartype name: str - :ivar op: This is the comparison operator which is used for object comparison and filtering. - Only == (equality operator) is currently supported. Required. - :vartype op: str - :ivar value: This is the filter tag value field used for tag based filtering, it can have 0 - - 256 characters. Required. - :vartype value: str - """ - - _validation = { - "name": {"required": True, "max_length": 128, "min_length": 1}, - "op": {"required": True}, - "value": {"required": True, "max_length": 256}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "op": {"key": "op", "type": "str"}, - "value": {"key": "value", "type": "str"}, - } - - def __init__(self, *, name: str, op: str, value: str, **kwargs: Any) -> None: - """ - :keyword name: This is the filter tag name, it can have 1 - 128 characters. Required. - :paramtype name: str - :keyword op: This is the comparison operator which is used for object comparison and filtering. 
- Only == (equality operator) is currently supported. Required. - :paramtype op: str - :keyword value: This is the filter tag value field used for tag based filtering, it can have 0 - - 256 characters. Required. - :paramtype value: str - """ - super().__init__(**kwargs) - self.name = name - self.op = op - self.value = value - - -class TagProperty(_serialization.Model): - """A tag of the LegalHold of a blob container. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar tag: The tag value. - :vartype tag: str - :ivar timestamp: Returns the date and time the tag was added. - :vartype timestamp: ~datetime.datetime - :ivar object_identifier: Returns the Object ID of the user who added the tag. - :vartype object_identifier: str - :ivar tenant_id: Returns the Tenant ID that issued the token for the user who added the tag. - :vartype tenant_id: str - :ivar upn: Returns the User Principal Name of the user who added the tag. - :vartype upn: str - """ - - _validation = { - "tag": {"readonly": True}, - "timestamp": {"readonly": True}, - "object_identifier": {"readonly": True}, - "tenant_id": {"readonly": True}, - "upn": {"readonly": True}, - } - - _attribute_map = { - "tag": {"key": "tag", "type": "str"}, - "timestamp": {"key": "timestamp", "type": "iso-8601"}, - "object_identifier": {"key": "objectIdentifier", "type": "str"}, - "tenant_id": {"key": "tenantId", "type": "str"}, - "upn": {"key": "upn", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.tag: Optional[str] = None - self.timestamp: Optional[datetime.datetime] = None - self.object_identifier: Optional[str] = None - self.tenant_id: Optional[str] = None - self.upn: Optional[str] = None - - -class TriggerParameters(_serialization.Model): - """The trigger parameters update for the storage task assignment execution. - - :ivar start_from: When to start task execution. 
This is a required field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. - :vartype start_from: ~datetime.datetime - :ivar interval: Run interval of task execution. This is a required field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. - :vartype interval: int - :ivar interval_unit: Run interval unit of task execution. This is a required field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. "Days" - :vartype interval_unit: str or ~azure.mgmt.storage.models.IntervalUnit - :ivar end_by: When to end task execution. This is a required field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. - :vartype end_by: ~datetime.datetime - :ivar start_on: When to start task execution. This is a required field when - ExecutionTrigger.properties.type is 'RunOnce'; this property should not be present when - ExecutionTrigger.properties.type is 'OnSchedule'. 
- :vartype start_on: ~datetime.datetime - """ - - _validation = { - "interval": {"minimum": 1}, - } - - _attribute_map = { - "start_from": {"key": "startFrom", "type": "iso-8601"}, - "interval": {"key": "interval", "type": "int"}, - "interval_unit": {"key": "intervalUnit", "type": "str"}, - "end_by": {"key": "endBy", "type": "iso-8601"}, - "start_on": {"key": "startOn", "type": "iso-8601"}, - } - - def __init__( - self, - *, - start_from: Optional[datetime.datetime] = None, - interval: Optional[int] = None, - interval_unit: Optional[Union[str, "_models.IntervalUnit"]] = None, - end_by: Optional[datetime.datetime] = None, - start_on: Optional[datetime.datetime] = None, - **kwargs: Any - ) -> None: - """ - :keyword start_from: When to start task execution. This is a required field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. - :paramtype start_from: ~datetime.datetime - :keyword interval: Run interval of task execution. This is a required field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. - :paramtype interval: int - :keyword interval_unit: Run interval unit of task execution. This is a required field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. "Days" - :paramtype interval_unit: str or ~azure.mgmt.storage.models.IntervalUnit - :keyword end_by: When to end task execution. This is a required field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. - :paramtype end_by: ~datetime.datetime - :keyword start_on: When to start task execution. 
This is a required field when - ExecutionTrigger.properties.type is 'RunOnce'; this property should not be present when - ExecutionTrigger.properties.type is 'OnSchedule'. - :paramtype start_on: ~datetime.datetime - """ - super().__init__(**kwargs) - self.start_from = start_from - self.interval = interval - self.interval_unit = interval_unit - self.end_by = end_by - self.start_on = start_on - - -class TriggerParametersUpdate(_serialization.Model): - """The trigger parameters update for the storage task assignment execution. - - :ivar start_from: When to start task execution. This is a mutable field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. - :vartype start_from: ~datetime.datetime - :ivar interval: Run interval of task execution. This is a mutable field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. - :vartype interval: int - :ivar interval_unit: Run interval unit of task execution. This is a mutable field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. "Days" - :vartype interval_unit: str or ~azure.mgmt.storage.models.IntervalUnit - :ivar end_by: When to end task execution. This is a mutable field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. - :vartype end_by: ~datetime.datetime - :ivar start_on: When to start task execution. This is a mutable field when - ExecutionTrigger.properties.type is 'RunOnce'; this property should not be present when - ExecutionTrigger.properties.type is 'OnSchedule'. 
- :vartype start_on: ~datetime.datetime - """ - - _validation = { - "interval": {"minimum": 1}, - } - - _attribute_map = { - "start_from": {"key": "startFrom", "type": "iso-8601"}, - "interval": {"key": "interval", "type": "int"}, - "interval_unit": {"key": "intervalUnit", "type": "str"}, - "end_by": {"key": "endBy", "type": "iso-8601"}, - "start_on": {"key": "startOn", "type": "iso-8601"}, - } - - def __init__( - self, - *, - start_from: Optional[datetime.datetime] = None, - interval: Optional[int] = None, - interval_unit: Optional[Union[str, "_models.IntervalUnit"]] = None, - end_by: Optional[datetime.datetime] = None, - start_on: Optional[datetime.datetime] = None, - **kwargs: Any - ) -> None: - """ - :keyword start_from: When to start task execution. This is a mutable field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. - :paramtype start_from: ~datetime.datetime - :keyword interval: Run interval of task execution. This is a mutable field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. - :paramtype interval: int - :keyword interval_unit: Run interval unit of task execution. This is a mutable field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. "Days" - :paramtype interval_unit: str or ~azure.mgmt.storage.models.IntervalUnit - :keyword end_by: When to end task execution. This is a mutable field when - ExecutionTrigger.properties.type is 'OnSchedule'; this property should not be present when - ExecutionTrigger.properties.type is 'RunOnce'. - :paramtype end_by: ~datetime.datetime - :keyword start_on: When to start task execution. 
This is a mutable field when - ExecutionTrigger.properties.type is 'RunOnce'; this property should not be present when - ExecutionTrigger.properties.type is 'OnSchedule'. - :paramtype start_on: ~datetime.datetime - """ - super().__init__(**kwargs) - self.start_from = start_from - self.interval = interval - self.interval_unit = interval_unit - self.end_by = end_by - self.start_on = start_on - - -class UpdateHistoryProperty(_serialization.Model): - """An update history of the ImmutabilityPolicy of a blob container. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar update: The ImmutabilityPolicy update type of a blob container, possible values include: - put, lock and extend. Known values are: "put", "lock", and "extend". - :vartype update: str or ~azure.mgmt.storage.models.ImmutabilityPolicyUpdateType - :ivar immutability_period_since_creation_in_days: The immutability period for the blobs in the - container since the policy creation, in days. - :vartype immutability_period_since_creation_in_days: int - :ivar timestamp: Returns the date and time the ImmutabilityPolicy was updated. - :vartype timestamp: ~datetime.datetime - :ivar object_identifier: Returns the Object ID of the user who updated the ImmutabilityPolicy. - :vartype object_identifier: str - :ivar tenant_id: Returns the Tenant ID that issued the token for the user who updated the - ImmutabilityPolicy. - :vartype tenant_id: str - :ivar upn: Returns the User Principal Name of the user who updated the ImmutabilityPolicy. - :vartype upn: str - :ivar allow_protected_append_writes: This property can only be changed for unlocked time-based - retention policies. When enabled, new blocks can be written to an append blob while maintaining - immutability protection and compliance. Only new blocks can be added and any existing blocks - cannot be modified or deleted. This property cannot be changed with ExtendImmutabilityPolicy - API. 
- :vartype allow_protected_append_writes: bool - :ivar allow_protected_append_writes_all: This property can only be changed for unlocked - time-based retention policies. When enabled, new blocks can be written to both 'Append and Bock - Blobs' while maintaining immutability protection and compliance. Only new blocks can be added - and any existing blocks cannot be modified or deleted. This property cannot be changed with - ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and - 'allowProtectedAppendWritesAll' properties are mutually exclusive. - :vartype allow_protected_append_writes_all: bool - """ - - _validation = { - "update": {"readonly": True}, - "immutability_period_since_creation_in_days": {"readonly": True}, - "timestamp": {"readonly": True}, - "object_identifier": {"readonly": True}, - "tenant_id": {"readonly": True}, - "upn": {"readonly": True}, - } - - _attribute_map = { - "update": {"key": "update", "type": "str"}, - "immutability_period_since_creation_in_days": {"key": "immutabilityPeriodSinceCreationInDays", "type": "int"}, - "timestamp": {"key": "timestamp", "type": "iso-8601"}, - "object_identifier": {"key": "objectIdentifier", "type": "str"}, - "tenant_id": {"key": "tenantId", "type": "str"}, - "upn": {"key": "upn", "type": "str"}, - "allow_protected_append_writes": {"key": "allowProtectedAppendWrites", "type": "bool"}, - "allow_protected_append_writes_all": {"key": "allowProtectedAppendWritesAll", "type": "bool"}, - } - - def __init__( - self, - *, - allow_protected_append_writes: Optional[bool] = None, - allow_protected_append_writes_all: Optional[bool] = None, - **kwargs: Any - ) -> None: - """ - :keyword allow_protected_append_writes: This property can only be changed for unlocked - time-based retention policies. When enabled, new blocks can be written to an append blob while - maintaining immutability protection and compliance. Only new blocks can be added and any - existing blocks cannot be modified or deleted. 
This property cannot be changed with - ExtendImmutabilityPolicy API. - :paramtype allow_protected_append_writes: bool - :keyword allow_protected_append_writes_all: This property can only be changed for unlocked - time-based retention policies. When enabled, new blocks can be written to both 'Append and Bock - Blobs' while maintaining immutability protection and compliance. Only new blocks can be added - and any existing blocks cannot be modified or deleted. This property cannot be changed with - ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and - 'allowProtectedAppendWritesAll' properties are mutually exclusive. - :paramtype allow_protected_append_writes_all: bool - """ - super().__init__(**kwargs) - self.update: Optional[Union[str, "_models.ImmutabilityPolicyUpdateType"]] = None - self.immutability_period_since_creation_in_days: Optional[int] = None - self.timestamp: Optional[datetime.datetime] = None - self.object_identifier: Optional[str] = None - self.tenant_id: Optional[str] = None - self.upn: Optional[str] = None - self.allow_protected_append_writes = allow_protected_append_writes - self.allow_protected_append_writes_all = allow_protected_append_writes_all - - -class Usage(_serialization.Model): - """Describes Storage Resource Usage. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar unit: Gets the unit of measurement. Known values are: "Count", "Bytes", "Seconds", - "Percent", "CountsPerSecond", and "BytesPerSecond". - :vartype unit: str or ~azure.mgmt.storage.models.UsageUnit - :ivar current_value: Gets the current count of the allocated resources in the subscription. - :vartype current_value: int - :ivar limit: Gets the maximum count of the resources that can be allocated in the subscription. - :vartype limit: int - :ivar name: Gets the name of the type of usage. 
- :vartype name: ~azure.mgmt.storage.models.UsageName - """ - - _validation = { - "unit": {"readonly": True}, - "current_value": {"readonly": True}, - "limit": {"readonly": True}, - "name": {"readonly": True}, - } - - _attribute_map = { - "unit": {"key": "unit", "type": "str"}, - "current_value": {"key": "currentValue", "type": "int"}, - "limit": {"key": "limit", "type": "int"}, - "name": {"key": "name", "type": "UsageName"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.unit: Optional[Union[str, "_models.UsageUnit"]] = None - self.current_value: Optional[int] = None - self.limit: Optional[int] = None - self.name: Optional["_models.UsageName"] = None - - -class UsageListResult(_serialization.Model): - """The response from the List Usages operation. - - :ivar value: Gets or sets the list of Storage Resource Usages. - :vartype value: list[~azure.mgmt.storage.models.Usage] - """ - - _attribute_map = { - "value": {"key": "value", "type": "[Usage]"}, - } - - def __init__(self, *, value: Optional[list["_models.Usage"]] = None, **kwargs: Any) -> None: - """ - :keyword value: Gets or sets the list of Storage Resource Usages. - :paramtype value: list[~azure.mgmt.storage.models.Usage] - """ - super().__init__(**kwargs) - self.value = value - - -class UsageName(_serialization.Model): - """The usage names that can be used; currently limited to StorageAccount. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: Gets a string describing the resource name. - :vartype value: str - :ivar localized_value: Gets a localized string describing the resource name. 
- :vartype localized_value: str - """ - - _validation = { - "value": {"readonly": True}, - "localized_value": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "str"}, - "localized_value": {"key": "localizedValue", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value: Optional[str] = None - self.localized_value: Optional[str] = None - - -class UserAssignedIdentity(_serialization.Model): - """UserAssignedIdentity for the resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar principal_id: The principal ID of the identity. - :vartype principal_id: str - :ivar client_id: The client ID of the identity. - :vartype client_id: str - """ - - _validation = { - "principal_id": {"readonly": True}, - "client_id": {"readonly": True}, - } - - _attribute_map = { - "principal_id": {"key": "principalId", "type": "str"}, - "client_id": {"key": "clientId", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.principal_id: Optional[str] = None - self.client_id: Optional[str] = None - - -class VirtualNetworkRule(_serialization.Model): - """Virtual Network rule. - - All required parameters must be populated in order to send to server. - - :ivar virtual_network_resource_id: Resource ID of a subnet, for example: - /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. - Required. - :vartype virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value is "Allow". - :vartype action: str - :ivar state: Gets the state of virtual network rule. Known values are: "Provisioning", - "Deprovisioning", "Succeeded", "Failed", and "NetworkSourceDeleted". 
- :vartype state: str or ~azure.mgmt.storage.models.State - """ - - _validation = { - "virtual_network_resource_id": {"required": True}, - } - - _attribute_map = { - "virtual_network_resource_id": {"key": "id", "type": "str"}, - "action": {"key": "action", "type": "str"}, - "state": {"key": "state", "type": "str"}, - } - - def __init__( - self, - *, - virtual_network_resource_id: str, - action: Optional[Literal["Allow"]] = None, - state: Optional[Union[str, "_models.State"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword virtual_network_resource_id: Resource ID of a subnet, for example: - /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. - Required. - :paramtype virtual_network_resource_id: str - :keyword action: The action of virtual network rule. Default value is "Allow". - :paramtype action: str - :keyword state: Gets the state of virtual network rule. Known values are: "Provisioning", - "Deprovisioning", "Succeeded", "Failed", and "NetworkSourceDeleted". 
- :paramtype state: str or ~azure.mgmt.storage.models.State - """ - super().__init__(**kwargs) - self.virtual_network_resource_id = virtual_network_resource_id - self.action = action - self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_patch.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_patch.py index 8bcb627aa475..87676c65a8f0 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_patch.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models/_patch.py @@ -7,9 +7,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/__init__.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/__init__.py index 0d649856038b..a3f91fe60a7f 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/__init__.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/__init__.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -13,60 +12,64 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._blob_services_operations import BlobServicesOperations # type: ignore -from ._blob_containers_operations import BlobContainersOperations # type: ignore -from ._file_services_operations import FileServicesOperations # type: ignore -from ._file_shares_operations import FileSharesOperations # type: ignore -from ._queue_services_operations import QueueServicesOperations # type: ignore -from ._queue_operations import QueueOperations # type: ignore from ._operations import Operations # type: ignore -from ._skus_operations import SkusOperations # type: ignore -from ._storage_accounts_operations import StorageAccountsOperations # type: ignore -from ._deleted_accounts_operations import DeletedAccountsOperations # type: ignore -from ._usages_operations import UsagesOperations # type: ignore -from ._management_policies_operations import ManagementPoliciesOperations # type: ignore -from ._blob_inventory_policies_operations import BlobInventoryPoliciesOperations # type: ignore -from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations # type: ignore -from ._private_link_resources_operations import PrivateLinkResourcesOperations # type: ignore -from ._object_replication_policies_operations import ObjectReplicationPoliciesOperations # type: ignore -from ._local_users_operations import LocalUsersOperations # type: ignore -from ._encryption_scopes_operations import EncryptionScopesOperations # type: ignore -from ._table_services_operations import TableServicesOperations # type: ignore -from ._table_operations import TableOperations # type: ignore -from ._network_security_perimeter_configurations_operations import NetworkSecurityPerimeterConfigurationsOperations # type: ignore -from ._storage_task_assignments_operations import 
StorageTaskAssignmentsOperations # type: ignore -from ._storage_task_assignments_instances_report_operations import StorageTaskAssignmentsInstancesReportOperations # type: ignore -from ._storage_task_assignment_instances_report_operations import StorageTaskAssignmentInstancesReportOperations # type: ignore +from ._operations import BlobContainersOperations # type: ignore +from ._operations import BlobServicesOperations # type: ignore +from ._operations import StorageAccountsOperations # type: ignore +from ._operations import FileSharesOperations # type: ignore +from ._operations import FileServicesOperations # type: ignore +from ._operations import QueueServicesOperations # type: ignore +from ._operations import DeletedAccountsOperations # type: ignore +from ._operations import ManagementPoliciesOperations # type: ignore +from ._operations import BlobInventoryPoliciesOperations # type: ignore +from ._operations import PrivateEndpointConnectionsOperations # type: ignore +from ._operations import EncryptionScopesOperations # type: ignore +from ._operations import TableServicesOperations # type: ignore +from ._operations import NetworkSecurityPerimeterConfigurationsOperations # type: ignore +from ._operations import StorageTaskAssignmentsOperations # type: ignore +from ._operations import ConnectorsOperations # type: ignore +from ._operations import DataSharesOperations # type: ignore +from ._operations import PrivateLinkResourcesOperations # type: ignore +from ._operations import StorageTaskAssignmentsInstancesReportOperations # type: ignore +from ._operations import QueueOperations # type: ignore +from ._operations import ObjectReplicationPoliciesOperations # type: ignore +from ._operations import LocalUsersOperations # type: ignore +from ._operations import TableOperations # type: ignore +from ._operations import StorageTaskAssignmentInstancesReportOperations # type: ignore +from ._operations import SkusOperations # type: ignore +from ._operations import 
UsagesOperations # type: ignore from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ - "BlobServicesOperations", + "Operations", "BlobContainersOperations", - "FileServicesOperations", + "BlobServicesOperations", + "StorageAccountsOperations", "FileSharesOperations", + "FileServicesOperations", "QueueServicesOperations", - "QueueOperations", - "Operations", - "SkusOperations", - "StorageAccountsOperations", "DeletedAccountsOperations", - "UsagesOperations", "ManagementPoliciesOperations", "BlobInventoryPoliciesOperations", "PrivateEndpointConnectionsOperations", - "PrivateLinkResourcesOperations", - "ObjectReplicationPoliciesOperations", - "LocalUsersOperations", "EncryptionScopesOperations", "TableServicesOperations", - "TableOperations", "NetworkSecurityPerimeterConfigurationsOperations", "StorageTaskAssignmentsOperations", + "ConnectorsOperations", + "DataSharesOperations", + "PrivateLinkResourcesOperations", "StorageTaskAssignmentsInstancesReportOperations", + "QueueOperations", + "ObjectReplicationPoliciesOperations", + "LocalUsersOperations", + "TableOperations", "StorageTaskAssignmentInstancesReportOperations", + "SkusOperations", + "UsagesOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_blob_containers_operations.py deleted file mode 100644 index b02f1ca1340c..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_blob_containers_operations.py +++ /dev/null @@ -1,2390 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Iterator, Literal, Optional, TypeVar, Union, cast, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. 
import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request( - resource_group_name: str, - account_name: str, - subscription_id: str, - *, - maxpagesize: Optional[str] = None, - filter: Optional[str] = None, - include: Optional[Union[str, _models.ListContainersInclude]] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if maxpagesize is not None: - _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "str") - if filter is not None: - _params["$filter"] = _SERIALIZER.query("filter", filter, "str") - if include is not None: - _params["$include"] = 
_SERIALIZER.query("include", include, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_create_request( - resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def 
build_update_request( - resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_request( - resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, 
min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) - - -def build_set_legal_hold_request( - resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/setLegalHold", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: 
ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_clear_legal_hold_request( - resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/clearLegalHold", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = 
_SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_create_or_update_immutability_policy_request( # pylint: disable=name-too-long - resource_group_name: str, - account_name: str, - container_name: str, - subscription_id: str, - *, - if_match: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - immutability_policy_name: Literal["default"] = kwargs.pop("immutability_policy_name", "default") - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "immutabilityPolicyName": _SERIALIZER.url("immutability_policy_name", immutability_policy_name, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_immutability_policy_request( - resource_group_name: str, - account_name: str, - container_name: str, - subscription_id: str, - *, - if_match: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - immutability_policy_name: Literal["default"] = kwargs.pop("immutability_policy_name", "default") - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "immutabilityPolicyName": _SERIALIZER.url("immutability_policy_name", immutability_policy_name, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = 
_url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_immutability_policy_request( - resource_group_name: str, - account_name: str, - container_name: str, - subscription_id: str, - *, - if_match: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - immutability_policy_name: Literal["default"] = kwargs.pop("immutability_policy_name", "default") - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "immutabilityPolicyName": _SERIALIZER.url("immutability_policy_name", immutability_policy_name, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore 
- - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_lock_immutability_policy_request( - resource_group_name: str, - account_name: str, - container_name: str, - subscription_id: str, - *, - if_match: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/lock", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return 
HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_extend_immutability_policy_request( - resource_group_name: str, - account_name: str, - container_name: str, - subscription_id: str, - *, - if_match: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/extend", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) 
- - -def build_lease_request( - resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/lease", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_object_level_worm_request( - resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/migrate", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -class BlobContainersOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`blob_containers` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - maxpagesize: Optional[str] = None, - filter: Optional[str] = None, - include: Optional[Union[str, _models.ListContainersInclude]] = None, - **kwargs: Any - ) -> ItemPaged["_models.ListContainerItem"]: - """Lists all containers and does not support a prefix like data plane. Also SRP today does not - return continuation token. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional. Specified maximum number of containers that can be included in - the list. Default value is None. - :type maxpagesize: str - :param filter: Optional. When specified, only container names starting with the filter will be - listed. Default value is None. - :type filter: str - :param include: Optional, used to include the properties for soft deleted blob containers. - "deleted" Default value is None. 
- :type include: str or ~azure.mgmt.storage.models.ListContainersInclude - :return: An iterator like instance of either ListContainerItem or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.ListContainerItem] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListContainerItems] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - include=include, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("ListContainerItems", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = 
cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @overload - def create( - self, - resource_group_name: str, - account_name: str, - container_name: str, - blob_container: _models.BlobContainer, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobContainer: - """Creates a new container under the specified account as described by request body. The container - resource includes metadata and properties for that container. It does not include a list of the - blobs contained by the container. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param blob_container: Properties of the blob container to create. Required. 
- :type blob_container: ~azure.mgmt.storage.models.BlobContainer - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create( - self, - resource_group_name: str, - account_name: str, - container_name: str, - blob_container: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobContainer: - """Creates a new container under the specified account as described by request body. The container - resource includes metadata and properties for that container. It does not include a list of the - blobs contained by the container. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param blob_container: Properties of the blob container to create. Required. - :type blob_container: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def create( - self, - resource_group_name: str, - account_name: str, - container_name: str, - blob_container: Union[_models.BlobContainer, IO[bytes]], - **kwargs: Any - ) -> _models.BlobContainer: - """Creates a new container under the specified account as described by request body. The container - resource includes metadata and properties for that container. It does not include a list of the - blobs contained by the container. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param blob_container: Properties of the blob container to create. Is either a BlobContainer - type or a IO[bytes] type. Required. 
- :type blob_container: ~azure.mgmt.storage.models.BlobContainer or IO[bytes] - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BlobContainer] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(blob_container, (IOBase, bytes)): - _content = blob_container - else: - _json = self._serialize.body(blob_container, "BlobContainer") - - _request = build_create_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobContainer", pipeline_response.http_response) - - if cls: - return 
cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def update( - self, - resource_group_name: str, - account_name: str, - container_name: str, - blob_container: _models.BlobContainer, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobContainer: - """Updates container properties as specified in request body. Properties not mentioned in the - request will be unchanged. Update fails if the specified container doesn't already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param blob_container: Properties to update for the blob container. Required. - :type blob_container: ~azure.mgmt.storage.models.BlobContainer - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def update( - self, - resource_group_name: str, - account_name: str, - container_name: str, - blob_container: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobContainer: - """Updates container properties as specified in request body. Properties not mentioned in the - request will be unchanged. Update fails if the specified container doesn't already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param blob_container: Properties to update for the blob container. Required. - :type blob_container: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def update( - self, - resource_group_name: str, - account_name: str, - container_name: str, - blob_container: Union[_models.BlobContainer, IO[bytes]], - **kwargs: Any - ) -> _models.BlobContainer: - """Updates container properties as specified in request body. Properties not mentioned in the - request will be unchanged. Update fails if the specified container doesn't already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param blob_container: Properties to update for the blob container. Is either a BlobContainer - type or a IO[bytes] type. Required. 
- :type blob_container: ~azure.mgmt.storage.models.BlobContainer or IO[bytes] - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BlobContainer] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(blob_container, (IOBase, bytes)): - _content = blob_container - else: - _json = self._serialize.body(blob_container, "BlobContainer") - - _request = build_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobContainer", pipeline_response.http_response) - - if cls: - return 
cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get( - self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any - ) -> _models.BlobContainer: - """Gets properties of a specified container. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. 
- :type container_name: str - :return: BlobContainer or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobContainer - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.BlobContainer] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobContainer", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any - ) -> None: - """Deletes specified container under its account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return 
cls(pipeline_response, None, {}) # type: ignore - - @overload - def set_legal_hold( - self, - resource_group_name: str, - account_name: str, - container_name: str, - legal_hold: _models.LegalHold, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LegalHold: - """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold - follows an append pattern and does not clear out the existing tags that are not specified in - the request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param legal_hold: The LegalHold property that will be set to a blob container. Required. - :type legal_hold: ~azure.mgmt.storage.models.LegalHold - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: LegalHold or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LegalHold - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def set_legal_hold( - self, - resource_group_name: str, - account_name: str, - container_name: str, - legal_hold: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LegalHold: - """Sets legal hold tags. 
Setting the same tag results in an idempotent operation. SetLegalHold - follows an append pattern and does not clear out the existing tags that are not specified in - the request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param legal_hold: The LegalHold property that will be set to a blob container. Required. - :type legal_hold: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: LegalHold or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LegalHold - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def set_legal_hold( - self, - resource_group_name: str, - account_name: str, - container_name: str, - legal_hold: Union[_models.LegalHold, IO[bytes]], - **kwargs: Any - ) -> _models.LegalHold: - """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold - follows an append pattern and does not clear out the existing tags that are not specified in - the request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param legal_hold: The LegalHold property that will be set to a blob container. Is either a - LegalHold type or a IO[bytes] type. Required. - :type legal_hold: ~azure.mgmt.storage.models.LegalHold or IO[bytes] - :return: LegalHold or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LegalHold - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.LegalHold] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(legal_hold, (IOBase, bytes)): - _content = legal_hold - else: - _json = self._serialize.body(legal_hold, "LegalHold") - - _request = build_set_legal_hold_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - 
subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LegalHold", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def clear_legal_hold( - self, - resource_group_name: str, - account_name: str, - container_name: str, - legal_hold: _models.LegalHold, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LegalHold: - """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent - operation. ClearLegalHold clears out only the specified tags in the request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. 
- :type container_name: str - :param legal_hold: The LegalHold property that will be clear from a blob container. Required. - :type legal_hold: ~azure.mgmt.storage.models.LegalHold - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: LegalHold or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LegalHold - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def clear_legal_hold( - self, - resource_group_name: str, - account_name: str, - container_name: str, - legal_hold: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LegalHold: - """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent - operation. ClearLegalHold clears out only the specified tags in the request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param legal_hold: The LegalHold property that will be clear from a blob container. Required. - :type legal_hold: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: LegalHold or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LegalHold - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def clear_legal_hold( - self, - resource_group_name: str, - account_name: str, - container_name: str, - legal_hold: Union[_models.LegalHold, IO[bytes]], - **kwargs: Any - ) -> _models.LegalHold: - """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent - operation. ClearLegalHold clears out only the specified tags in the request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param legal_hold: The LegalHold property that will be clear from a blob container. Is either a - LegalHold type or a IO[bytes] type. Required. 
- :type legal_hold: ~azure.mgmt.storage.models.LegalHold or IO[bytes] - :return: LegalHold or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LegalHold - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.LegalHold] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(legal_hold, (IOBase, bytes)): - _content = legal_hold - else: - _json = self._serialize.body(legal_hold, "LegalHold") - - _request = build_clear_legal_hold_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LegalHold", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, 
{}) # type: ignore - - return deserialized # type: ignore - - @overload - def create_or_update_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: Optional[str] = None, - parameters: Optional[_models.ImmutabilityPolicy] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but - not required for this operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Default value is None. - :type if_match: str - :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob - container. Default value is None. - :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create_or_update_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: Optional[str] = None, - parameters: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but - not required for this operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Default value is None. - :type if_match: str - :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob - container. Default value is None. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
- Default value is "application/json". - :paramtype content_type: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def create_or_update_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: Optional[str] = None, - parameters: Optional[Union[_models.ImmutabilityPolicy, IO[bytes]]] = None, - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but - not required for this operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Default value is None. - :type if_match: str - :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob - container. Is either a ImmutabilityPolicy type or a IO[bytes] type. Default value is None. 
- :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy or IO[bytes] - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - immutability_policy_name: Literal["default"] = kwargs.pop("immutability_policy_name", "default") - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - content_type = content_type if parameters else None - cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if parameters else None - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - if parameters is not None: - _json = self._serialize.body(parameters, "ImmutabilityPolicy") - else: - _json = None - - _request = build_create_or_update_immutability_policy_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - if_match=if_match, - immutability_policy_name=immutability_policy_name, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = 
pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: Optional[str] = None, - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Gets the existing immutability policy along with the corresponding ETag in response headers and - body. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Default value is None. 
- :type if_match: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - immutability_policy_name: Literal["default"] = kwargs.pop("immutability_policy_name", "default") - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) - - _request = build_get_immutability_policy_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - if_match=if_match, - immutability_policy_name=immutability_policy_name, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def 
delete_immutability_policy( - self, resource_group_name: str, account_name: str, container_name: str, if_match: str, **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Aborts an unlocked immutability policy. The response of delete has - immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this - operation. Deleting a locked immutability policy is not allowed, the only way is to delete the - container after deleting all expired blobs inside the policy locked container. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Required. 
- :type if_match: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - immutability_policy_name: Literal["default"] = kwargs.pop("immutability_policy_name", "default") - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) - - _request = build_delete_immutability_policy_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - if_match=if_match, - immutability_policy_name=immutability_policy_name, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def 
lock_immutability_policy( - self, resource_group_name: str, account_name: str, container_name: str, if_match: str, **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is - ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Required. 
- :type if_match: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) - - _request = build_lock_immutability_policy_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - if_match=if_match, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @overload - def extend_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: str, - parameters: Optional[_models.ImmutabilityPolicy] = None, 
- *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only - action allowed on a Locked policy will be this action. ETag in If-Match is required for this - operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Required. - :type if_match: str - :param parameters: The ImmutabilityPolicy Properties that will be extended for a blob - container. Default value is None. - :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def extend_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: str, - parameters: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only - action allowed on a Locked policy will be this action. ETag in If-Match is required for this - operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Required. - :type if_match: str - :param parameters: The ImmutabilityPolicy Properties that will be extended for a blob - container. Default value is None. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
- Default value is "application/json". - :paramtype content_type: str - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def extend_immutability_policy( - self, - resource_group_name: str, - account_name: str, - container_name: str, - if_match: str, - parameters: Optional[Union[_models.ImmutabilityPolicy, IO[bytes]]] = None, - **kwargs: Any - ) -> _models.ImmutabilityPolicy: - """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only - action allowed on a Locked policy will be this action. ETag in If-Match is required for this - operation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param if_match: The entity state (ETag) version of the immutability policy to update must be - returned to the server for all update operations. The ETag value must include the leading and - trailing double quotes as returned by the service. Required. - :type if_match: str - :param parameters: The ImmutabilityPolicy Properties that will be extended for a blob - container. Is either a ImmutabilityPolicy type or a IO[bytes] type. Default value is None. 
- :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy or IO[bytes] - :return: ImmutabilityPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - content_type = content_type if parameters else None - cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if parameters else None - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - if parameters is not None: - _json = self._serialize.body(parameters, "ImmutabilityPolicy") - else: - _json = None - - _request = build_extend_immutability_policy_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - if_match=if_match, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @overload - def lease( - self, - resource_group_name: str, - account_name: str, - container_name: str, - parameters: Optional[_models.LeaseContainerRequest] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LeaseContainerResponse: - """The Lease Container operation establishes and manages a lock on a container for delete - operations. The lock duration can be 15 to 60 seconds, or can be infinite. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param parameters: Lease Container request body. Default value is None. - :type parameters: ~azure.mgmt.storage.models.LeaseContainerRequest - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: LeaseContainerResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def lease( - self, - resource_group_name: str, - account_name: str, - container_name: str, - parameters: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LeaseContainerResponse: - """The Lease Container operation establishes and manages a lock on a container for delete - operations. The lock duration can be 15 to 60 seconds, or can be infinite. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param parameters: Lease Container request body. Default value is None. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: LeaseContainerResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def lease( - self, - resource_group_name: str, - account_name: str, - container_name: str, - parameters: Optional[Union[_models.LeaseContainerRequest, IO[bytes]]] = None, - **kwargs: Any - ) -> _models.LeaseContainerResponse: - """The Lease Container operation establishes and manages a lock on a container for delete - operations. The lock duration can be 15 to 60 seconds, or can be infinite. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. - :type container_name: str - :param parameters: Lease Container request body. Is either a LeaseContainerRequest type or a - IO[bytes] type. Default value is None. 
- :type parameters: ~azure.mgmt.storage.models.LeaseContainerRequest or IO[bytes] - :return: LeaseContainerResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - content_type = content_type if parameters else None - cls: ClsType[_models.LeaseContainerResponse] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if parameters else None - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - if parameters is not None: - _json = self._serialize.body(parameters, "LeaseContainerRequest") - else: - _json = None - - _request = build_lease_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LeaseContainerResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - def _object_level_worm_initial( - self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any - ) -> Iterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - _request = build_object_level_worm_request( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: 
ignore - - return deserialized # type: ignore - - @distributed_trace - def begin_object_level_worm( - self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any - ) -> LROPoller[None]: - """This operation migrates a blob container from container level WORM to object level immutability - enabled container. Prerequisites require a container level immutability policy either in locked - or unlocked state, Account level versioning must be enabled and there should be no Legal hold - on the container. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param container_name: The name of the blob container within the specified storage account. - Blob container names must be between 3 and 63 characters in length and use numbers, lower-case - letters and dash (-) only. Every dash (-) character must be immediately preceded and followed - by a letter or number. Required. 
- :type container_name: str - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._object_level_worm_initial( - resource_group_name=resource_group_name, - account_name=account_name, - container_name=container_name, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_blob_inventory_policies_operations.py 
b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_blob_inventory_policies_operations.py deleted file mode 100644 index 163888ad4e76..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_blob_inventory_policies_operations.py +++ /dev/null @@ -1,583 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_get_request( - resource_group_name: str, - account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - subscription_id: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies/{blobInventoryPolicyName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "blobInventoryPolicyName": _SERIALIZER.url("blob_inventory_policy_name", blob_inventory_policy_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def 
build_create_or_update_request( - resource_group_name: str, - account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - subscription_id: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies/{blobInventoryPolicyName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "blobInventoryPolicyName": _SERIALIZER.url("blob_inventory_policy_name", blob_inventory_policy_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - resource_group_name: str, - account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - subscription_id: str, 
- **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies/{blobInventoryPolicyName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "blobInventoryPolicyName": _SERIALIZER.url("blob_inventory_policy_name", blob_inventory_policy_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_request(resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class BlobInventoryPoliciesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`blob_inventory_policies` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def get( - self, - resource_group_name: str, - account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - **kwargs: Any - ) -> _models.BlobInventoryPolicy: - """Gets the blob inventory policy associated with the specified storage account. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It - should always be 'default'. "default" Required. - :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName - :return: BlobInventoryPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.BlobInventoryPolicy] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - blob_inventory_policy_name=blob_inventory_policy_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, 
error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobInventoryPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def create_or_update( - self, - resource_group_name: str, - account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - properties: _models.BlobInventoryPolicy, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobInventoryPolicy: - """Sets the blob inventory policy to the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It - should always be 'default'. "default" Required. - :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName - :param properties: The blob inventory policy set to a storage account. Required. - :type properties: ~azure.mgmt.storage.models.BlobInventoryPolicy - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: BlobInventoryPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create_or_update( - self, - resource_group_name: str, - account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - properties: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobInventoryPolicy: - """Sets the blob inventory policy to the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It - should always be 'default'. "default" Required. - :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName - :param properties: The blob inventory policy set to a storage account. Required. - :type properties: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: BlobInventoryPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def create_or_update( - self, - resource_group_name: str, - account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - properties: Union[_models.BlobInventoryPolicy, IO[bytes]], - **kwargs: Any - ) -> _models.BlobInventoryPolicy: - """Sets the blob inventory policy to the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It - should always be 'default'. "default" Required. - :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName - :param properties: The blob inventory policy set to a storage account. Is either a - BlobInventoryPolicy type or a IO[bytes] type. Required. 
- :type properties: ~azure.mgmt.storage.models.BlobInventoryPolicy or IO[bytes] - :return: BlobInventoryPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BlobInventoryPolicy] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties - else: - _json = self._serialize.body(properties, "BlobInventoryPolicy") - - _request = build_create_or_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - blob_inventory_policy_name=blob_inventory_policy_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobInventoryPolicy", 
pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - account_name: str, - blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], - **kwargs: Any - ) -> None: - """Deletes the blob inventory policy associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It - should always be 'default'. "default" Required. 
- :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - blob_inventory_policy_name=blob_inventory_policy_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @distributed_trace - def list( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> ItemPaged["_models.BlobInventoryPolicy"]: - """Gets the blob inventory policy associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: An iterator like instance of either BlobInventoryPolicy or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.BlobInventoryPolicy] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListBlobInventoryPolicy] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - 
deserialized = self._deserialize("ListBlobInventoryPolicy", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_blob_services_operations.py deleted file mode 100644 index e0d476e38ae1..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_blob_services_operations.py +++ /dev/null @@ -1,461 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Literal, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request(resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", 
account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_service_properties_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - blob_services_name: Literal["default"] = kwargs.pop("blob_services_name", "default") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/{BlobServicesName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "BlobServicesName": _SERIALIZER.url("blob_services_name", blob_services_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_service_properties_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - blob_services_name: Literal["default"] = kwargs.pop("blob_services_name", "default") - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/{BlobServicesName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "BlobServicesName": _SERIALIZER.url("blob_services_name", blob_services_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class BlobServicesOperations: - """ - .. 
warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`blob_services` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> ItemPaged["_models.BlobServiceProperties"]: - """List blob services of storage account. It returns a collection of one object named default. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: An iterator like instance of either BlobServiceProperties or the result of - cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.BlobServiceProperties] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.BlobServiceItems] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("BlobServiceItems", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, iter(list_of_elem) - - def get_next(next_link=None): - _request = 
prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @overload - def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: _models.BlobServiceProperties, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobServiceProperties: - """Sets the properties of a storage account’s Blob service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Blob service, including properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Required. - :type parameters: ~azure.mgmt.storage.models.BlobServiceProperties - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: BlobServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.BlobServiceProperties: - """Sets the properties of a storage account’s Blob service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Blob service, including properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: BlobServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.BlobServiceProperties, IO[bytes]], - **kwargs: Any - ) -> _models.BlobServiceProperties: - """Sets the properties of a storage account’s Blob service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Blob service, including properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Is either a - BlobServiceProperties type or a IO[bytes] type. Required. - :type parameters: ~azure.mgmt.storage.models.BlobServiceProperties or IO[bytes] - :return: BlobServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - blob_services_name: Literal["default"] = kwargs.pop("blob_services_name", "default") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BlobServiceProperties] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "BlobServiceProperties") - - _request = build_set_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - 
subscription_id=self._config.subscription_id, - api_version=api_version, - blob_services_name=blob_services_name, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get_service_properties( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> _models.BlobServiceProperties: - """Gets the properties of a storage account’s Blob service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: BlobServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.BlobServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - blob_services_name: Literal["default"] = kwargs.pop("blob_services_name", "default") - cls: ClsType[_models.BlobServiceProperties] = kwargs.pop("cls", None) - - _request = build_get_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - blob_services_name=blob_services_name, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("BlobServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_deleted_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_deleted_accounts_operations.py deleted file mode 100644 index 
550ee4836edf..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_deleted_accounts_operations.py +++ /dev/null @@ -1,249 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/deletedAccounts") - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_request(deleted_account_name: str, location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/locations/{location}/deletedAccounts/{deletedAccountName}", - ) - path_format_arguments = { - "deletedAccountName": _SERIALIZER.url( - 
"deleted_account_name", deleted_account_name, "str", max_length=24, min_length=3 - ), - "location": _SERIALIZER.url("location", location, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class DeletedAccountsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`deleted_accounts` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list(self, **kwargs: Any) -> ItemPaged["_models.DeletedAccount"]: - """Lists deleted accounts under the subscription. 
- - :return: An iterator like instance of either DeletedAccount or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.DeletedAccount] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.DeletedAccountListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("DeletedAccountListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: 
PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @distributed_trace - def get(self, deleted_account_name: str, location: str, **kwargs: Any) -> _models.DeletedAccount: - """Get properties of specified deleted account resource. - - :param deleted_account_name: Name of the deleted storage account. Required. - :type deleted_account_name: str - :param location: The location of the deleted storage account. Required. - :type location: str - :return: DeletedAccount or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.DeletedAccount - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.DeletedAccount] = kwargs.pop("cls", None) - - _request = build_get_request( - deleted_account_name=deleted_account_name, - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("DeletedAccount", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_encryption_scopes_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_encryption_scopes_operations.py deleted file mode 100644 index cc8421c3fc61..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_encryption_scopes_operations.py +++ /dev/null @@ -1,728 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_put_request( - resource_group_name: str, account_name: str, encryption_scope_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - 
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "encryptionScopeName": _SERIALIZER.url( - "encryption_scope_name", encryption_scope_name, "str", max_length=63, min_length=3 - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_patch_request( - resource_group_name: str, account_name: str, encryption_scope_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, 
pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "encryptionScopeName": _SERIALIZER.url( - "encryption_scope_name", encryption_scope_name, "str", max_length=63, min_length=3 - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_request( - resource_group_name: str, account_name: str, encryption_scope_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "encryptionScopeName": _SERIALIZER.url( - "encryption_scope_name", encryption_scope_name, "str", max_length=63, min_length=3 - ), - } - - _url: str = _url.format(**path_format_arguments) # 
type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_request( - resource_group_name: str, - account_name: str, - subscription_id: str, - *, - maxpagesize: Optional[int] = None, - filter: Optional[str] = None, - include: Optional[Union[str, _models.ListEncryptionScopesInclude]] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if maxpagesize is not None: - _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int", maximum=5000, minimum=1) - if filter is not None: - _params["$filter"] = _SERIALIZER.query("filter", filter, "str") - if include is not None: - _params["$include"] = _SERIALIZER.query("include", include, "str") - - # 
Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class EncryptionScopesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`encryption_scopes` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - def put( - self, - resource_group_name: str, - account_name: str, - encryption_scope_name: str, - encryption_scope: _models.EncryptionScope, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.EncryptionScope: - """Synchronously creates or updates an encryption scope under the specified storage account. If an - encryption scope is already created and a subsequent request is issued with different - properties, the encryption scope properties will be updated per the specified request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. 
Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. - :type encryption_scope_name: str - :param encryption_scope: Encryption scope properties to be used for the create or update. - Required. - :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def put( - self, - resource_group_name: str, - account_name: str, - encryption_scope_name: str, - encryption_scope: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.EncryptionScope: - """Synchronously creates or updates an encryption scope under the specified storage account. If an - encryption scope is already created and a subsequent request is issued with different - properties, the encryption scope properties will be updated per the specified request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. 
Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. - :type encryption_scope_name: str - :param encryption_scope: Encryption scope properties to be used for the create or update. - Required. - :type encryption_scope: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def put( - self, - resource_group_name: str, - account_name: str, - encryption_scope_name: str, - encryption_scope: Union[_models.EncryptionScope, IO[bytes]], - **kwargs: Any - ) -> _models.EncryptionScope: - """Synchronously creates or updates an encryption scope under the specified storage account. If an - encryption scope is already created and a subsequent request is issued with different - properties, the encryption scope properties will be updated per the specified request. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. - :type encryption_scope_name: str - :param encryption_scope: Encryption scope properties to be used for the create or update. 
Is - either a EncryptionScope type or a IO[bytes] type. Required. - :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope or IO[bytes] - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EncryptionScope] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(encryption_scope, (IOBase, bytes)): - _content = encryption_scope - else: - _json = self._serialize.body(encryption_scope, "EncryptionScope") - - _request = build_put_request( - resource_group_name=resource_group_name, - account_name=account_name, - encryption_scope_name=encryption_scope_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - 
pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("EncryptionScope", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def patch( - self, - resource_group_name: str, - account_name: str, - encryption_scope_name: str, - encryption_scope: _models.EncryptionScope, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.EncryptionScope: - """Update encryption scope properties as specified in the request body. Update fails if the - specified encryption scope does not already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. - :type encryption_scope_name: str - :param encryption_scope: Encryption scope properties to be used for the update. Required. - :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def patch( - self, - resource_group_name: str, - account_name: str, - encryption_scope_name: str, - encryption_scope: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.EncryptionScope: - """Update encryption scope properties as specified in the request body. Update fails if the - specified encryption scope does not already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. - :type encryption_scope_name: str - :param encryption_scope: Encryption scope properties to be used for the update. Required. - :type encryption_scope: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def patch( - self, - resource_group_name: str, - account_name: str, - encryption_scope_name: str, - encryption_scope: Union[_models.EncryptionScope, IO[bytes]], - **kwargs: Any - ) -> _models.EncryptionScope: - """Update encryption scope properties as specified in the request body. Update fails if the - specified encryption scope does not already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. - :type encryption_scope_name: str - :param encryption_scope: Encryption scope properties to be used for the update. Is either a - EncryptionScope type or a IO[bytes] type. Required. 
- :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope or IO[bytes] - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EncryptionScope] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(encryption_scope, (IOBase, bytes)): - _content = encryption_scope - else: - _json = self._serialize.body(encryption_scope, "EncryptionScope") - - _request = build_patch_request( - resource_group_name=resource_group_name, - account_name=account_name, - encryption_scope_name=encryption_scope_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) - - deserialized = self._deserialize("EncryptionScope", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get( - self, resource_group_name: str, account_name: str, encryption_scope_name: str, **kwargs: Any - ) -> _models.EncryptionScope: - """Returns the properties for the specified encryption scope. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param encryption_scope_name: The name of the encryption scope within the specified storage - account. Encryption scope names must be between 3 and 63 characters in length and use numbers, - lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and - followed by a letter or number. Required. 
- :type encryption_scope_name: str - :return: EncryptionScope or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.EncryptionScope - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.EncryptionScope] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - encryption_scope_name=encryption_scope_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("EncryptionScope", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - maxpagesize: Optional[int] = None, - filter: Optional[str] = None, - include: Optional[Union[str, _models.ListEncryptionScopesInclude]] = None, - 
**kwargs: Any - ) -> ItemPaged["_models.EncryptionScope"]: - """Lists all the encryption scopes available under the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional, specifies the maximum number of encryption scopes that will be - included in the list response. Default value is None. - :type maxpagesize: int - :param filter: Optional. When specified, only encryption scope names starting with the filter - will be listed. Default value is None. - :type filter: str - :param include: Optional, when specified, will list encryption scopes with the specific state. - Defaults to All. Known values are: "All", "Enabled", and "Disabled". Default value is None. 
- :type include: str or ~azure.mgmt.storage.models.ListEncryptionScopesInclude - :return: An iterator like instance of either EncryptionScope or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.EncryptionScope] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.EncryptionScopeListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - include=include, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("EncryptionScopeListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem 
= cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_file_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_file_services_operations.py deleted file mode 100644 index 32f3dbbc8a7d..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_file_services_operations.py +++ /dev/null @@ -1,668 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Literal, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request(resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", 
account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_service_properties_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - file_services_name: Literal["default"] = kwargs.pop("file_services_name", "default") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "FileServicesName": _SERIALIZER.url("file_services_name", file_services_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_service_properties_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - file_services_name: Literal["default"] = kwargs.pop("file_services_name", "default") - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "FileServicesName": _SERIALIZER.url("file_services_name", file_services_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_service_usages_request( - 
resource_group_name: str, - account_name: str, - subscription_id: str, - *, - maxpagesize: Optional[int] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - file_services_name: Literal["default"] = kwargs.pop("file_services_name", "default") - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}/usages", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "FileServicesName": _SERIALIZER.url("file_services_name", file_services_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if maxpagesize is not None: - _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_service_usage_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - file_services_name: Literal["default"] = kwargs.pop("file_services_name", "default") - file_service_usages_name: Literal["default"] = kwargs.pop("file_service_usages_name", "default") - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}/usages/{fileServiceUsagesName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "FileServicesName": _SERIALIZER.url("file_services_name", file_services_name, "str"), - "fileServiceUsagesName": _SERIALIZER.url("file_service_usages_name", file_service_usages_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class FileServicesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`file_services` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.FileServiceItems: - """List all file services in storage accounts. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: FileServiceItems or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileServiceItems - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.FileServiceItems] = kwargs.pop("cls", None) - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileServiceItems", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: _models.FileServiceProperties, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.FileServiceProperties: - """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource - Sharing) rules. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of file services in storage accounts, including CORS - (Cross-Origin Resource Sharing) rules. Required. - :type parameters: ~azure.mgmt.storage.models.FileServiceProperties - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: FileServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.FileServiceProperties: - """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource - Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of file services in storage accounts, including CORS - (Cross-Origin Resource Sharing) rules. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
- Default value is "application/json". - :paramtype content_type: str - :return: FileServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.FileServiceProperties, IO[bytes]], - **kwargs: Any - ) -> _models.FileServiceProperties: - """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource - Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of file services in storage accounts, including CORS - (Cross-Origin Resource Sharing) rules. Is either a FileServiceProperties type or a IO[bytes] - type. Required. 
- :type parameters: ~azure.mgmt.storage.models.FileServiceProperties or IO[bytes] - :return: FileServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - file_services_name: Literal["default"] = kwargs.pop("file_services_name", "default") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.FileServiceProperties] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "FileServiceProperties") - - _request = build_set_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - file_services_name=file_services_name, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get_service_properties( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> _models.FileServiceProperties: - """Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource - Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: FileServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - file_services_name: Literal["default"] = kwargs.pop("file_services_name", "default") - cls: ClsType[_models.FileServiceProperties] = kwargs.pop("cls", None) - - _request = build_get_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - file_services_name=file_services_name, - headers=_headers, - params=_params, - ) - 
_request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def list_service_usages( - self, resource_group_name: str, account_name: str, maxpagesize: Optional[int] = None, **kwargs: Any - ) -> ItemPaged["_models.FileServiceUsage"]: - """Gets the usages of file service in storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional, specifies the maximum number of file service usages to be - included in the list response. Default value is None. 
- :type maxpagesize: int - :return: An iterator like instance of either FileServiceUsage or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.FileServiceUsage] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - file_services_name: Literal["default"] = kwargs.pop("file_services_name", "default") - cls: ClsType[_models.FileServiceUsages] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_service_usages_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - api_version=api_version, - file_services_name=file_services_name, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("FileServiceUsages", pipeline_response) - list_of_elem = 
deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @distributed_trace - def get_service_usage(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.FileServiceUsage: - """Gets the usage of file service in storage account including account limits, file share limits - and constants used in recommendations and bursting formula. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: FileServiceUsage or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileServiceUsage - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - file_services_name: Literal["default"] = kwargs.pop("file_services_name", "default") - file_service_usages_name: Literal["default"] = kwargs.pop("file_service_usages_name", "default") - cls: ClsType[_models.FileServiceUsage] = kwargs.pop("cls", None) - - _request = build_get_service_usage_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - file_services_name=file_services_name, - file_service_usages_name=file_service_usages_name, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileServiceUsage", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_file_shares_operations.py 
b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_file_shares_operations.py deleted file mode 100644 index e8088507547f..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_file_shares_operations.py +++ /dev/null @@ -1,1295 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request( - resource_group_name: str, - account_name: str, - subscription_id: str, - *, - maxpagesize: Optional[str] = None, - filter: Optional[str] = None, - expand: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if maxpagesize is not None: - _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "str") - if filter is not None: - _params["$filter"] = _SERIALIZER.query("filter", filter, "str") - if expand is not None: - _params["$expand"] = _SERIALIZER.query("expand", expand, "str") - - # Construct 
headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_create_request( - resource_group_name: str, - account_name: str, - share_name: str, - subscription_id: str, - *, - expand: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "shareName": _SERIALIZER.url("share_name", share_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if expand is not None: - _params["$expand"] = _SERIALIZER.query("expand", expand, "str") - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, 
params=_params, headers=_headers, **kwargs) - - -def build_update_request( - resource_group_name: str, account_name: str, share_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "shareName": _SERIALIZER.url("share_name", share_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_request( - resource_group_name: str, - account_name: str, - share_name: str, - subscription_id: str, - *, - expand: Optional[str] = None, - x_ms_snapshot: Optional[str] = None, - **kwargs: Any -) -> 
HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "shareName": _SERIALIZER.url("share_name", share_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if expand is not None: - _params["$expand"] = _SERIALIZER.query("expand", expand, "str") - - # Construct headers - if x_ms_snapshot is not None: - _headers["x-ms-snapshot"] = _SERIALIZER.header("x_ms_snapshot", x_ms_snapshot, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - resource_group_name: str, - account_name: str, - share_name: str, - subscription_id: str, - *, - x_ms_snapshot: Optional[str] = None, - include: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "shareName": _SERIALIZER.url("share_name", share_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if include is not None: - _params["$include"] = _SERIALIZER.query("include", include, "str") - - # Construct headers - if x_ms_snapshot is not None: - _headers["x-ms-snapshot"] = _SERIALIZER.header("x_ms_snapshot", x_ms_snapshot, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_restore_request( - resource_group_name: str, account_name: str, share_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( 
- "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}/restore", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "shareName": _SERIALIZER.url("share_name", share_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_lease_request( - resource_group_name: str, - account_name: str, - share_name: str, - subscription_id: str, - *, - x_ms_snapshot: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}/lease", - ) - 
path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "shareName": _SERIALIZER.url("share_name", share_name, "str", max_length=63, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if x_ms_snapshot is not None: - _headers["x-ms-snapshot"] = _SERIALIZER.header("x_ms_snapshot", x_ms_snapshot, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -class FileSharesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`file_shares` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - maxpagesize: Optional[str] = None, - filter: Optional[str] = None, - expand: Optional[str] = None, - **kwargs: Any - ) -> ItemPaged["_models.FileShareItem"]: - """Lists all shares. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional. Specified maximum number of shares that can be included in the - list. Default value is None. - :type maxpagesize: str - :param filter: Optional. When specified, only share names starting with the filter will be - listed. Default value is None. - :type filter: str - :param expand: Optional, used to expand the properties within share's properties. Valid values - are: deleted, snapshots. Should be passed as a string with delimiter ','. Default value is - None. 
- :type expand: str - :return: An iterator like instance of either FileShareItem or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.FileShareItem] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.FileShareItems] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - expand=expand, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("FileShareItems", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, 
iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @overload - def create( - self, - resource_group_name: str, - account_name: str, - share_name: str, - file_share: _models.FileShare, - expand: Optional[str] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.FileShare: - """Creates a new share under the specified account as described by request body. The share - resource includes metadata and properties for that share. It does not include a list of the - files contained by the share. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param file_share: Properties of the file share to create. Required. 
- :type file_share: ~azure.mgmt.storage.models.FileShare - :param expand: Optional, used to expand the properties within share's properties. Valid values - are: snapshots. Should be passed as a string with delimiter ','. Default value is None. - :type expand: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create( - self, - resource_group_name: str, - account_name: str, - share_name: str, - file_share: IO[bytes], - expand: Optional[str] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.FileShare: - """Creates a new share under the specified account as described by request body. The share - resource includes metadata and properties for that share. It does not include a list of the - files contained by the share. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param file_share: Properties of the file share to create. Required. - :type file_share: IO[bytes] - :param expand: Optional, used to expand the properties within share's properties. 
Valid values - are: snapshots. Should be passed as a string with delimiter ','. Default value is None. - :type expand: str - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def create( - self, - resource_group_name: str, - account_name: str, - share_name: str, - file_share: Union[_models.FileShare, IO[bytes]], - expand: Optional[str] = None, - **kwargs: Any - ) -> _models.FileShare: - """Creates a new share under the specified account as described by request body. The share - resource includes metadata and properties for that share. It does not include a list of the - files contained by the share. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param file_share: Properties of the file share to create. Is either a FileShare type or a - IO[bytes] type. Required. - :type file_share: ~azure.mgmt.storage.models.FileShare or IO[bytes] - :param expand: Optional, used to expand the properties within share's properties. Valid values - are: snapshots. 
Should be passed as a string with delimiter ','. Default value is None. - :type expand: str - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(file_share, (IOBase, bytes)): - _content = file_share - else: - _json = self._serialize.body(file_share, "FileShare") - - _request = build_create_request( - resource_group_name=resource_group_name, - account_name=account_name, - share_name=share_name, - subscription_id=self._config.subscription_id, - expand=expand, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileShare", pipeline_response.http_response) - - if cls: - return 
cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def update( - self, - resource_group_name: str, - account_name: str, - share_name: str, - file_share: _models.FileShare, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.FileShare: - """Updates share properties as specified in request body. Properties not mentioned in the request - will not be changed. Update fails if the specified share does not already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param file_share: Properties to update for the file share. Required. - :type file_share: ~azure.mgmt.storage.models.FileShare - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def update( - self, - resource_group_name: str, - account_name: str, - share_name: str, - file_share: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.FileShare: - """Updates share properties as specified in request body. 
Properties not mentioned in the request - will not be changed. Update fails if the specified share does not already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param file_share: Properties to update for the file share. Required. - :type file_share: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def update( - self, - resource_group_name: str, - account_name: str, - share_name: str, - file_share: Union[_models.FileShare, IO[bytes]], - **kwargs: Any - ) -> _models.FileShare: - """Updates share properties as specified in request body. Properties not mentioned in the request - will not be changed. Update fails if the specified share does not already exist. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. 
- Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param file_share: Properties to update for the file share. Is either a FileShare type or a - IO[bytes] type. Required. - :type file_share: ~azure.mgmt.storage.models.FileShare or IO[bytes] - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(file_share, (IOBase, bytes)): - _content = file_share - else: - _json = self._serialize.body(file_share, "FileShare") - - _request = build_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - share_name=share_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = 
self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileShare", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get( - self, - resource_group_name: str, - account_name: str, - share_name: str, - expand: Optional[str] = None, - x_ms_snapshot: Optional[str] = None, - **kwargs: Any - ) -> _models.FileShare: - """Gets properties of a specified share. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param expand: Optional, used to expand the properties within share's properties. Valid values - are: stats. Should be passed as a string with delimiter ','. Default value is None. - :type expand: str - :param x_ms_snapshot: Optional, used to retrieve properties of a snapshot. Default value is - None. 
- :type x_ms_snapshot: str - :return: FileShare or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.FileShare - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - share_name=share_name, - subscription_id=self._config.subscription_id, - expand=expand, - x_ms_snapshot=x_ms_snapshot, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("FileShare", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - account_name: str, - share_name: str, - x_ms_snapshot: Optional[str] = None, - include: Optional[str] = None, - **kwargs: Any - ) -> None: - """Deletes specified share under its account. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param x_ms_snapshot: Optional, used to delete a snapshot. Default value is None. - :type x_ms_snapshot: str - :param include: Optional. Valid values are: snapshots, leased-snapshots, none. The default - value is snapshots. For 'snapshots', the file share is deleted including all of its file share - snapshots. If the file share contains leased-snapshots, the deletion fails. For - 'leased-snapshots', the file share is deleted included all of its file share snapshots - (leased/unleased). For 'none', the file share is deleted if it has no share snapshots. If the - file share contains any snapshots (leased or unleased), the deletion fails. Default value is - None. 
- :type include: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - share_name=share_name, - subscription_id=self._config.subscription_id, - x_ms_snapshot=x_ms_snapshot, - include=include, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @overload - def restore( - self, - resource_group_name: str, - account_name: str, - share_name: str, - deleted_share: _models.DeletedShare, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> None: - """Restore a file share within a valid retention days if share soft delete is enabled. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param deleted_share: Required. - :type deleted_share: ~azure.mgmt.storage.models.DeletedShare - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def restore( - self, - resource_group_name: str, - account_name: str, - share_name: str, - deleted_share: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> None: - """Restore a file share within a valid retention days if share soft delete is enabled. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. 
Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param deleted_share: Required. - :type deleted_share: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def restore( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - account_name: str, - share_name: str, - deleted_share: Union[_models.DeletedShare, IO[bytes]], - **kwargs: Any - ) -> None: - """Restore a file share within a valid retention days if share soft delete is enabled. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param deleted_share: Is either a DeletedShare type or a IO[bytes] type. Required. 
- :type deleted_share: ~azure.mgmt.storage.models.DeletedShare or IO[bytes] - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(deleted_share, (IOBase, bytes)): - _content = deleted_share - else: - _json = self._serialize.body(deleted_share, "DeletedShare") - - _request = build_restore_request( - resource_group_name=resource_group_name, - account_name=account_name, - share_name=share_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @overload - def lease( - self, - resource_group_name: str, - account_name: str, - share_name: str, - x_ms_snapshot: 
Optional[str] = None, - parameters: Optional[_models.LeaseShareRequest] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LeaseShareResponse: - """The Lease Share operation establishes and manages a lock on a share for delete operations. The - lock duration can be 15 to 60 seconds, or can be infinite. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default value is - None. - :type x_ms_snapshot: str - :param parameters: Lease Share request body. Default value is None. - :type parameters: ~azure.mgmt.storage.models.LeaseShareRequest - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: LeaseShareResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LeaseShareResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def lease( - self, - resource_group_name: str, - account_name: str, - share_name: str, - x_ms_snapshot: Optional[str] = None, - parameters: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LeaseShareResponse: - """The Lease Share operation establishes and manages a lock on a share for delete operations. The - lock duration can be 15 to 60 seconds, or can be infinite. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default value is - None. - :type x_ms_snapshot: str - :param parameters: Lease Share request body. Default value is None. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: LeaseShareResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LeaseShareResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def lease( - self, - resource_group_name: str, - account_name: str, - share_name: str, - x_ms_snapshot: Optional[str] = None, - parameters: Optional[Union[_models.LeaseShareRequest, IO[bytes]]] = None, - **kwargs: Any - ) -> _models.LeaseShareResponse: - """The Lease Share operation establishes and manages a lock on a share for delete operations. The - lock duration can be 15 to 60 seconds, or can be infinite. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param share_name: The name of the file share within the specified storage account. File share - names must be between 3 and 63 characters in length and use numbers, lower-case letters and - dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter - or number. Required. - :type share_name: str - :param x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default value is - None. - :type x_ms_snapshot: str - :param parameters: Lease Share request body. Is either a LeaseShareRequest type or a IO[bytes] - type. Default value is None. 
- :type parameters: ~azure.mgmt.storage.models.LeaseShareRequest or IO[bytes] - :return: LeaseShareResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LeaseShareResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - content_type = content_type if parameters else None - cls: ClsType[_models.LeaseShareResponse] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if parameters else None - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - if parameters is not None: - _json = self._serialize.body(parameters, "LeaseShareRequest") - else: - _json = None - - _request = build_lease_request( - resource_group_name=resource_group_name, - account_name=account_name, - share_name=share_name, - subscription_id=self._config.subscription_id, - x_ms_snapshot=x_ms_snapshot, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("LeaseShareResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_local_users_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_local_users_operations.py deleted file mode 100644 index 82e3b1b55988..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_local_users_operations.py +++ /dev/null @@ -1,815 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request( - resource_group_name: str, - account_name: str, - subscription_id: str, - *, - maxpagesize: Optional[int] = None, - filter: Optional[str] = None, - include: Optional[Union[str, _models.ListLocalUserIncludeParam]] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/localUsers", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - 
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if maxpagesize is not None: - _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int", maximum=5000, minimum=1) - if filter is not None: - _params["$filter"] = _SERIALIZER.query("filter", filter, "str") - if include is not None: - _params["$include"] = _SERIALIZER.query("include", include, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_request( - resource_group_name: str, account_name: str, username: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/localUsers/{username}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "username": _SERIALIZER.url("username", username, "str", max_length=64, min_length=3), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_create_or_update_request( - resource_group_name: str, account_name: str, username: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/localUsers/{username}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "username": _SERIALIZER.url("username", username, "str", max_length=64, min_length=3), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: 
- _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - resource_group_name: str, account_name: str, username: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/localUsers/{username}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "username": _SERIALIZER.url("username", username, "str", max_length=64, min_length=3), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_keys_request( - resource_group_name: str, account_name: str, username: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/localUsers/{username}/listKeys", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "username": _SERIALIZER.url("username", username, "str", max_length=64, min_length=3), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_regenerate_password_request( - resource_group_name: str, account_name: str, username: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/localUsers/{username}/regeneratePassword", - ) - path_format_arguments = { - 
"resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "username": _SERIALIZER.url("username", username, "str", max_length=64, min_length=3), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -class LocalUsersOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`local_users` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - maxpagesize: Optional[int] = None, - filter: Optional[str] = None, - include: Optional[Union[str, _models.ListLocalUserIncludeParam]] = None, - **kwargs: Any - ) -> ItemPaged["_models.LocalUser"]: - """List the local users associated with the storage account. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional, specifies the maximum number of local users that will be included - in the list response. Default value is None. - :type maxpagesize: int - :param filter: Optional. When specified, only local user names starting with the filter will be - listed. Default value is None. - :type filter: str - :param include: Optional, when specified, will list local users enabled for the specific - protocol. Lists all users by default. "nfsv3" Default value is None. - :type include: str or ~azure.mgmt.storage.models.ListLocalUserIncludeParam - :return: An iterator like instance of either LocalUser or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.LocalUser] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.LocalUsers] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - include=include, - api_version=api_version, - 
headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("LocalUsers", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @distributed_trace - def get(self, resource_group_name: str, account_name: str, username: str, **kwargs: Any) -> _models.LocalUser: - """Get the local user of the storage account by username. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. - :type username: str - :return: LocalUser or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LocalUser - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.LocalUser] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - username=username, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LocalUser", 
pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def create_or_update( - self, - resource_group_name: str, - account_name: str, - username: str, - properties: _models.LocalUser, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LocalUser: - """Create or update the properties of a local user associated with the storage account. Properties - for NFSv3 enablement and extended groups cannot be set with other properties. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. - :type username: str - :param properties: The local user associated with a storage account. Required. - :type properties: ~azure.mgmt.storage.models.LocalUser - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: LocalUser or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LocalUser - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create_or_update( - self, - resource_group_name: str, - account_name: str, - username: str, - properties: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.LocalUser: - """Create or update the properties of a local user associated with the storage account. 
Properties - for NFSv3 enablement and extended groups cannot be set with other properties. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. - :type username: str - :param properties: The local user associated with a storage account. Required. - :type properties: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: LocalUser or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LocalUser - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def create_or_update( - self, - resource_group_name: str, - account_name: str, - username: str, - properties: Union[_models.LocalUser, IO[bytes]], - **kwargs: Any - ) -> _models.LocalUser: - """Create or update the properties of a local user associated with the storage account. Properties - for NFSv3 enablement and extended groups cannot be set with other properties. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. 
The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. - :type username: str - :param properties: The local user associated with a storage account. Is either a LocalUser type - or a IO[bytes] type. Required. - :type properties: ~azure.mgmt.storage.models.LocalUser or IO[bytes] - :return: LocalUser or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LocalUser - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.LocalUser] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties - else: - _json = self._serialize.body(properties, "LocalUser") - - _request = build_create_or_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - username=username, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - 
map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LocalUser", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, account_name: str, username: str, **kwargs: Any - ) -> None: - """Deletes the local user associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. 
- :type username: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - username=username, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @distributed_trace - def list_keys( - self, resource_group_name: str, account_name: str, username: str, **kwargs: Any - ) -> _models.LocalUserKeys: - """List SSH authorized keys and shared key of the local user. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. 
- Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. - :type username: str - :return: LocalUserKeys or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LocalUserKeys - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.LocalUserKeys] = kwargs.pop("cls", None) - - _request = build_list_keys_request( - resource_group_name=resource_group_name, - account_name=account_name, - username=username, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LocalUserKeys", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - 
- return deserialized # type: ignore - - @distributed_trace - def regenerate_password( - self, resource_group_name: str, account_name: str, username: str, **kwargs: Any - ) -> _models.LocalUserRegeneratePasswordResult: - """Regenerate the local user SSH password. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param username: The name of local user. The username must contain lowercase letters and - numbers only. It must be unique only within the storage account. Required. - :type username: str - :return: LocalUserRegeneratePasswordResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.LocalUserRegeneratePasswordResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.LocalUserRegeneratePasswordResult] = kwargs.pop("cls", None) - - _request = build_regenerate_password_request( - resource_group_name=resource_group_name, - account_name=account_name, - username=username, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # 
pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("LocalUserRegeneratePasswordResult", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_management_policies_operations.py deleted file mode 100644 index 346bcd003df6..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_management_policies_operations.py +++ /dev/null @@ -1,453 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_get_request( - resource_group_name: str, - account_name: str, - management_policy_name: Union[str, _models.ManagementPolicyName], - subscription_id: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/managementPolicies/{managementPolicyName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": 
_SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "managementPolicyName": _SERIALIZER.url("management_policy_name", management_policy_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_create_or_update_request( - resource_group_name: str, - account_name: str, - management_policy_name: Union[str, _models.ManagementPolicyName], - subscription_id: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/managementPolicies/{managementPolicyName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "managementPolicyName": _SERIALIZER.url("management_policy_name", management_policy_name, "str"), 
- } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - resource_group_name: str, - account_name: str, - management_policy_name: Union[str, _models.ManagementPolicyName], - subscription_id: str, - **kwargs: Any -) -> HttpRequest: - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/managementPolicies/{managementPolicyName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "managementPolicyName": _SERIALIZER.url("management_policy_name", management_policy_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) - - -class ManagementPoliciesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. 
- - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`management_policies` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def get( - self, - resource_group_name: str, - account_name: str, - management_policy_name: Union[str, _models.ManagementPolicyName], - **kwargs: Any - ) -> _models.ManagementPolicy: - """Gets the managementpolicy associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param management_policy_name: The name of the Storage Account Management Policy. It should - always be 'default'. "default" Required. 
- :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName - :return: ManagementPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ManagementPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ManagementPolicy] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - management_policy_name=management_policy_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ManagementPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def create_or_update( - self, - resource_group_name: str, - account_name: str, - management_policy_name: Union[str, _models.ManagementPolicyName], - properties: _models.ManagementPolicy, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> 
_models.ManagementPolicy: - """Sets the managementpolicy to the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param management_policy_name: The name of the Storage Account Management Policy. It should - always be 'default'. "default" Required. - :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName - :param properties: The ManagementPolicy set to a storage account. Required. - :type properties: ~azure.mgmt.storage.models.ManagementPolicy - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: ManagementPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ManagementPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create_or_update( - self, - resource_group_name: str, - account_name: str, - management_policy_name: Union[str, _models.ManagementPolicyName], - properties: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ManagementPolicy: - """Sets the managementpolicy to the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param management_policy_name: The name of the Storage Account Management Policy. It should - always be 'default'. "default" Required. - :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName - :param properties: The ManagementPolicy set to a storage account. Required. - :type properties: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: ManagementPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ManagementPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def create_or_update( - self, - resource_group_name: str, - account_name: str, - management_policy_name: Union[str, _models.ManagementPolicyName], - properties: Union[_models.ManagementPolicy, IO[bytes]], - **kwargs: Any - ) -> _models.ManagementPolicy: - """Sets the managementpolicy to the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param management_policy_name: The name of the Storage Account Management Policy. It should - always be 'default'. "default" Required. - :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName - :param properties: The ManagementPolicy set to a storage account. Is either a ManagementPolicy - type or a IO[bytes] type. Required. 
- :type properties: ~azure.mgmt.storage.models.ManagementPolicy or IO[bytes] - :return: ManagementPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ManagementPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.ManagementPolicy] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties - else: - _json = self._serialize.body(properties, "ManagementPolicy") - - _request = build_create_or_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - management_policy_name=management_policy_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ManagementPolicy", pipeline_response.http_response) - 
- if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - account_name: str, - management_policy_name: Union[str, _models.ManagementPolicyName], - **kwargs: Any - ) -> None: - """Deletes the managementpolicy associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param management_policy_name: The name of the Storage Account Management Policy. It should - always be 'default'. "default" Required. - :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - management_policy_name=management_policy_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = 
self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_network_security_perimeter_configurations_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_network_security_perimeter_configurations_operations.py deleted file mode 100644 index e1dca78b2548..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_network_security_perimeter_configurations_operations.py +++ /dev/null @@ -1,476 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Iterator, Optional, TypeVar, Union, cast -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request(resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/networkSecurityPerimeterConfigurations", - ) - path_format_arguments = { - "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_request( - resource_group_name: str, - account_name: str, - network_security_perimeter_configuration_name: str, - subscription_id: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/networkSecurityPerimeterConfigurations/{networkSecurityPerimeterConfigurationName}", - ) - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "networkSecurityPerimeterConfigurationName": _SERIALIZER.url( - "network_security_perimeter_configuration_name", - 
network_security_perimeter_configuration_name, - "str", - pattern=r"^.*$", - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_reconcile_request( - resource_group_name: str, - account_name: str, - network_security_perimeter_configuration_name: str, - subscription_id: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/networkSecurityPerimeterConfigurations/{networkSecurityPerimeterConfigurationName}/reconcile", - ) - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "networkSecurityPerimeterConfigurationName": _SERIALIZER.url( - "network_security_perimeter_configuration_name", - network_security_perimeter_configuration_name, - "str", - pattern=r"^.*$", - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct 
headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -class NetworkSecurityPerimeterConfigurationsOperations: # pylint: disable=name-too-long - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`network_security_perimeter_configurations` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> ItemPaged["_models.NetworkSecurityPerimeterConfiguration"]: - """Gets list of effective NetworkSecurityPerimeterConfiguration for storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: An iterator like instance of either NetworkSecurityPerimeterConfiguration or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.NetworkSecurityPerimeterConfiguration] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.NetworkSecurityPerimeterConfigurationList] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("NetworkSecurityPerimeterConfigurationList", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore 
- return None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @distributed_trace - def get( - self, - resource_group_name: str, - account_name: str, - network_security_perimeter_configuration_name: str, - **kwargs: Any - ) -> _models.NetworkSecurityPerimeterConfiguration: - """Gets effective NetworkSecurityPerimeterConfiguration for association. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param network_security_perimeter_configuration_name: The name for Network Security Perimeter - configuration. Required. 
- :type network_security_perimeter_configuration_name: str - :return: NetworkSecurityPerimeterConfiguration or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.NetworkSecurityPerimeterConfiguration - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.NetworkSecurityPerimeterConfiguration] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("NetworkSecurityPerimeterConfiguration", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - def _reconcile_initial( - self, - resource_group_name: str, - 
account_name: str, - network_security_perimeter_configuration_name: str, - **kwargs: Any - ) -> Iterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - _request = build_reconcile_request( - resource_group_name=resource_group_name, - account_name=account_name, - network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - 
- return deserialized # type: ignore - - @distributed_trace - def begin_reconcile( - self, - resource_group_name: str, - account_name: str, - network_security_perimeter_configuration_name: str, - **kwargs: Any - ) -> LROPoller[None]: - """Refreshes any information about the association. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param network_security_perimeter_configuration_name: The name for Network Security Perimeter - configuration. Required. - :type network_security_perimeter_configuration_name: str - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._reconcile_initial( - resource_group_name=resource_group_name, - account_name=account_name, - network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def 
get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_object_replication_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_object_replication_policies_operations.py deleted file mode 100644 index 32037bd20853..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_object_replication_policies_operations.py +++ /dev/null @@ -1,595 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request(resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - 
"account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_request( - resource_group_name: str, account_name: str, object_replication_policy_id: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "objectReplicationPolicyId": _SERIALIZER.url( - "object_replication_policy_id", object_replication_policy_id, "str", min_length=1 - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - 
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_create_or_update_request( - resource_group_name: str, account_name: str, object_replication_policy_id: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "objectReplicationPolicyId": _SERIALIZER.url( - "object_replication_policy_id", object_replication_policy_id, "str", min_length=1 - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def 
build_delete_request( - resource_group_name: str, account_name: str, object_replication_policy_id: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "objectReplicationPolicyId": _SERIALIZER.url( - "object_replication_policy_id", object_replication_policy_id, "str", min_length=1 - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -class ObjectReplicationPoliciesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`object_replication_policies` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> ItemPaged["_models.ObjectReplicationPolicy"]: - """List the object replication policies associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: An iterator like instance of either ObjectReplicationPolicy or the result of - cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.ObjectReplicationPolicy] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ObjectReplicationPolicies] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("ObjectReplicationPolicies", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, iter(list_of_elem) - - def 
get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @distributed_trace - def get( - self, resource_group_name: str, account_name: str, object_replication_policy_id: str, **kwargs: Any - ) -> _models.ObjectReplicationPolicy: - """Get the object replication policy of the storage account by policy ID. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param object_replication_policy_id: For the destination account, provide the value 'default'. - Configure the policy on the destination account first. For the source account, provide the - value of the policy ID that is returned when you download the policy that was defined on the - destination account. The policy is downloaded as a JSON file. Required. 
- :type object_replication_policy_id: str - :return: ObjectReplicationPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ObjectReplicationPolicy] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - object_replication_policy_id=object_replication_policy_id, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ObjectReplicationPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def create_or_update( - self, - resource_group_name: str, - account_name: str, - object_replication_policy_id: str, - properties: _models.ObjectReplicationPolicy, - *, - 
content_type: str = "application/json", - **kwargs: Any - ) -> _models.ObjectReplicationPolicy: - """Create or update the object replication policy of the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param object_replication_policy_id: For the destination account, provide the value 'default'. - Configure the policy on the destination account first. For the source account, provide the - value of the policy ID that is returned when you download the policy that was defined on the - destination account. The policy is downloaded as a JSON file. Required. - :type object_replication_policy_id: str - :param properties: The object replication policy set to a storage account. A unique policy ID - will be created if absent. Required. - :type properties: ~azure.mgmt.storage.models.ObjectReplicationPolicy - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: ObjectReplicationPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create_or_update( - self, - resource_group_name: str, - account_name: str, - object_replication_policy_id: str, - properties: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ObjectReplicationPolicy: - """Create or update the object replication policy of the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. 
The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param object_replication_policy_id: For the destination account, provide the value 'default'. - Configure the policy on the destination account first. For the source account, provide the - value of the policy ID that is returned when you download the policy that was defined on the - destination account. The policy is downloaded as a JSON file. Required. - :type object_replication_policy_id: str - :param properties: The object replication policy set to a storage account. A unique policy ID - will be created if absent. Required. - :type properties: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: ObjectReplicationPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def create_or_update( - self, - resource_group_name: str, - account_name: str, - object_replication_policy_id: str, - properties: Union[_models.ObjectReplicationPolicy, IO[bytes]], - **kwargs: Any - ) -> _models.ObjectReplicationPolicy: - """Create or update the object replication policy of the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param object_replication_policy_id: For the destination account, provide the value 'default'. - Configure the policy on the destination account first. For the source account, provide the - value of the policy ID that is returned when you download the policy that was defined on the - destination account. The policy is downloaded as a JSON file. Required. - :type object_replication_policy_id: str - :param properties: The object replication policy set to a storage account. A unique policy ID - will be created if absent. Is either a ObjectReplicationPolicy type or a IO[bytes] type. - Required. - :type properties: ~azure.mgmt.storage.models.ObjectReplicationPolicy or IO[bytes] - :return: ObjectReplicationPolicy or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.ObjectReplicationPolicy] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties - else: - _json = self._serialize.body(properties, "ObjectReplicationPolicy") - - _request = build_create_or_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - object_replication_policy_id=object_replication_policy_id, - subscription_id=self._config.subscription_id, - 
api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ObjectReplicationPolicy", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, account_name: str, object_replication_policy_id: str, **kwargs: Any - ) -> None: - """Deletes the object replication policy associated with the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param object_replication_policy_id: For the destination account, provide the value 'default'. - Configure the policy on the destination account first. For the source account, provide the - value of the policy ID that is returned when you download the policy that was defined on the - destination account. The policy is downloaded as a JSON file. Required. 
- :type object_replication_policy_id: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - object_replication_policy_id=object_replication_policy_id, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_operations.py index 41d5808b06c3..47cfc6fd6e66 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_operations.py @@ -1,51 +1,3625 @@ +# pylint: 
disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar +from io import IOBase +import json +from typing import Any, Callable, IO, Iterator, Literal, Optional, TypeVar, Union, cast, overload import urllib.parse -from azure.core import PipelineClient +from azure.core import MatchConditions, PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, + ResourceModifiedError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models from .._configuration import StorageManagementClientConfiguration +from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize from .._utils.serialization import Deserializer, Serializer +from .._utils.utils import prep_if_match, prep_if_none_match +from .._validation import api_version_validation T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +JSON = MutableMapping[str, Any] List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -def build_list_request(**kwargs: Any) -> HttpRequest: +def build_operations_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/providers/Microsoft.Storage/operations") + _url = "/providers/Microsoft.Storage/operations" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_containers_get_request( + resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_containers_create_request( + resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_containers_update_request( + resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, 
params=_params, headers=_headers, **kwargs) + + +def build_blob_containers_delete_request( + resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_blob_containers_set_legal_hold_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/setLegalHold" + path_format_arguments 
= { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_containers_clear_legal_hold_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/clearLegalHold" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_containers_lease_request( + resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/lease" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, 
params=_params, headers=_headers, **kwargs) + + +def build_blob_containers_object_level_worm_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/migrate" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "containerName": _SERIALIZER.url("container_name", container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="POST", url=_url, params=_params, **kwargs) + + +def build_blob_containers_list_request( + resource_group_name: str, + account_name: str, + subscription_id: str, + *, + maxpagesize: Optional[str] = None, + filter: Optional[str] = None, + include: Optional[Union[str, _models.ListContainersInclude]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers" + 
    # NOTE(review): continuation of a list-style request builder whose `def` line
    # lies above this chunk (it serializes $maxpagesize/$filter/$include options);
    # reproduced verbatim with only comments added.
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if maxpagesize is not None:
        _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "str")
    if filter is not None:
        _params["$filter"] = _SERIALIZER.query("filter", filter, "str")
    if include is not None:
        _params["$include"] = _SERIALIZER.query("include", include, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_blob_containers_get_immutability_policy_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    account_name: str,
    container_name: str,
    subscription_id: str,
    *,
    etag: Optional[str] = None,
    match_condition: Optional[MatchConditions] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request for a blob container's default immutability policy.

    Optional ``etag``/``match_condition`` are translated into If-Match /
    If-None-Match conditional headers.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
        "containerName": _SERIALIZER.url("container_name", container_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    # Conditional-request headers derived from etag/match_condition (may be absent).
    if_match = prep_if_match(etag, match_condition)
    if if_match is not None:
        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
    if_none_match = prep_if_none_match(etag, match_condition)
    if if_none_match is not None:
        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_blob_containers_create_or_update_immutability_policy_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    account_name: str,
    container_name: str,
    subscription_id: str,
    *,
    etag: Optional[str] = None,
    match_condition: Optional[MatchConditions] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a PUT request that creates or updates a container's default immutability policy."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
        "containerName": _SERIALIZER.url("container_name", container_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    if_match = prep_if_match(etag, match_condition)
    if if_match is not None:
        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
    if_none_match = prep_if_none_match(etag, match_condition)
    if if_none_match is not None:
        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_blob_containers_delete_immutability_policy_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    account_name: str,
    container_name: str,
    subscription_id: str,
    *,
    etag: str,
    match_condition: MatchConditions,
    **kwargs: Any
) -> HttpRequest:
    """Build a DELETE request for a container's default immutability policy.

    Note: ``etag``/``match_condition`` are required here, unlike the GET/PUT builders.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
        "containerName": _SERIALIZER.url("container_name", container_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    if_match = prep_if_match(etag, match_condition)
    if if_match is not None:
        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
    if_none_match = prep_if_none_match(etag, match_condition)
    if if_none_match is not None:
        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")

    return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)


def build_blob_containers_lock_immutability_policy_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    account_name: str,
    container_name: str,
    subscription_id: str,
    *,
    etag: str,
    match_condition: MatchConditions,
    **kwargs: Any
) -> HttpRequest:
    """Build a POST request to the .../immutabilityPolicies/default/lock action."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/lock"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
        "containerName": _SERIALIZER.url("container_name", container_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    if_match = prep_if_match(etag, match_condition)
    if if_match is not None:
        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
    if_none_match = prep_if_none_match(etag, match_condition)
    if if_none_match is not None:
        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")

    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)


def build_blob_containers_extend_immutability_policy_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    account_name: str,
    container_name: str,
    subscription_id: str,
    *,
    etag: str,
    match_condition: MatchConditions,
    **kwargs: Any
) -> HttpRequest:
    """Build a POST request to the .../immutabilityPolicies/default/extend action."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/extend"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
        "containerName": _SERIALIZER.url("container_name", container_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    if_match = prep_if_match(etag, match_condition)
    if if_match is not None:
        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
    if_none_match = prep_if_none_match(etag, match_condition)
    if if_none_match is not None:
        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")

    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)


def build_blob_services_get_service_properties_request(  # pylint: disable=name-too-long
    resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build a GET request for the account's default blob service properties."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def \
build_blob_services_set_service_properties_request(  # pylint: disable=name-too-long
    resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build a PUT request that sets the account's default blob service properties."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_blob_services_list_request(
    resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build a GET request that lists blob services for a storage account."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_accounts_check_name_availability_request(  # pylint: disable=name-too-long
    subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build a subscription-level POST request to checkNameAvailability."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/checkNameAvailability"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)

def build_storage_accounts_get_properties_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    account_name: str,
    subscription_id: str,
    *,
    expand: Optional[Union[str, _models.StorageAccountExpand]] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request for a storage account; ``expand`` maps to the $expand query option."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if expand is not None:
        _params["$expand"] = _SERIALIZER.query("expand", expand, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_accounts_create_request(
    resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build a PUT request that creates (or fully replaces) a storage account."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_accounts_update_request(
    resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build a PATCH request that updates a storage account in place."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_accounts_delete_request(
    resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build a DELETE request for a storage account (no headers are sent)."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)


def build_storage_accounts_list_by_resource_group_request(  # pylint: disable=name-too-long
    resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build a GET request that lists storage accounts in a resource group."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts"
    )
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url(
            "subscription_id",
subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_accounts_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/storageAccounts" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_accounts_list_keys_request( + resource_group_name: str, + account_name: str, + subscription_id: str, + *, + expand: Literal["kerb"] = "kerb", + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/listKeys" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_accounts_regenerate_key_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/regenerateKey" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_accounts_list_account_sas_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/listAccountSas" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_accounts_list_service_sas_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, 
    **kwargs: Any
) -> HttpRequest:
    """Build a POST request to /listServiceSas."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/listServiceSas"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_accounts_failover_request(
    resource_group_name: str,
    account_name: str,
    subscription_id: str,
    *,
    failover_type: Literal["Planned"] = "Planned",
    **kwargs: Any
) -> HttpRequest:
    """Build a POST request to /failover; failoverType is pinned to "Planned"."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/failover"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if failover_type is not None:
        _params["failoverType"] = _SERIALIZER.query("failover_type", failover_type, "str")

    return HttpRequest(method="POST", url=_url, params=_params, **kwargs)


def build_storage_accounts_hierarchical_namespace_migration_request(  # pylint: disable=name-too-long
    resource_group_name: str, account_name: str, subscription_id: str, *, request_type: str, **kwargs: Any
) -> HttpRequest:
    """Build a POST request to /hnsonmigration; requestType is a required query parameter."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/hnsonmigration"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    _params["requestType"] = _SERIALIZER.query("request_type", request_type, "str")

    return HttpRequest(method="POST", url=_url, params=_params, **kwargs)


def build_storage_accounts_abort_hierarchical_namespace_migration_request(  # pylint: disable=name-too-long
    resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build a POST request to /aborthnsonmigration."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop(
        "api_version",
_params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/aborthnsonmigration" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="POST", url=_url, params=_params, **kwargs) + + +def build_storage_accounts_customer_initiated_migration_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/startAccountMigration" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = 
_SERIALIZER.header("content_type", content_type, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_accounts_restore_blob_ranges_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/restoreBlobRanges" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_accounts_revoke_user_delegation_keys_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) 
    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/revokeUserDelegationKeys"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="POST", url=_url, params=_params, **kwargs)


def build_storage_accounts_get_customer_initiated_migration_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    account_name: str,
    migration_name: Union[str, _models.MigrationName],
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request for /accountMigrations/{migrationName}."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/accountMigrations/{migrationName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
        "migrationName": _SERIALIZER.url("migration_name", migration_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_file_shares_get_request(
    resource_group_name: str,
    account_name: str,
    share_name: str,
    subscription_id: str,
    *,
    expand: Optional[str] = None,
    x_ms_snapshot: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request for a file share; ``x_ms_snapshot`` targets a share snapshot."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "accountName": _SERIALIZER.url("account_name", account_name, "str"),
        "shareName": _SERIALIZER.url("share_name", share_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if expand is not None:
        _params["$expand"] = _SERIALIZER.query("expand", expand, "str")

    # Construct headers
    if x_ms_snapshot is not None:
        _headers["x-ms-snapshot"] = _SERIALIZER.header("x_ms_snapshot", x_ms_snapshot, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_file_shares_create_request(
    resource_group_name: str,
    account_name: str,
    share_name: str,
    subscription_id: str,
    *,
    expand: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    # NOTE(review): body continues beyond this chunk; the trailing line-continuation
    # keeps the statement joined with the text that follows.
    _headers \
= case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "shareName": _SERIALIZER.url("share_name", share_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_shares_update_request( + resource_group_name: str, account_name: str, share_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + 
+ # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "shareName": _SERIALIZER.url("share_name", share_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_shares_delete_request( + resource_group_name: str, + account_name: str, + share_name: str, + subscription_id: str, + *, + x_ms_snapshot: Optional[str] = None, + include: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "shareName": _SERIALIZER.url("share_name", 
share_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if include is not None: + _params["$include"] = _SERIALIZER.query("include", include, "str") + + # Construct headers + if x_ms_snapshot is not None: + _headers["x-ms-snapshot"] = _SERIALIZER.header("x_ms_snapshot", x_ms_snapshot, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_shares_restore_request( + resource_group_name: str, account_name: str, share_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}/restore" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "shareName": _SERIALIZER.url("share_name", share_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_file_shares_lease_request( + resource_group_name: str, + account_name: str, + share_name: str, + subscription_id: str, + *, + x_ms_snapshot: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}/lease" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "shareName": _SERIALIZER.url("share_name", share_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if x_ms_snapshot is not None: + _headers["x-ms-snapshot"] = _SERIALIZER.header("x_ms_snapshot", x_ms_snapshot, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_shares_list_request( + resource_group_name: str, + account_name: str, + subscription_id: str, + *, + maxpagesize: Optional[str] = None, + filter: Optional[str] = None, + expand: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if maxpagesize is not None: + _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "str") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_services_get_service_properties_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_services_set_service_properties_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + 
if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_services_list_request( + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_services_get_service_usage_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/usages/default" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_services_list_service_usages_request( # pylint: disable=name-too-long + resource_group_name: str, + account_name: str, + subscription_id: str, + *, + maxpagesize: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/usages" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if maxpagesize is not None: + 
_params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_queue_services_get_service_properties_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_queue_services_set_service_properties_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_queue_services_list_request( + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct 
parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_deleted_accounts_get_request( + deleted_account_name: str, location: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/locations/{location}/deletedAccounts/{deletedAccountName}" + path_format_arguments = { + "deletedAccountName": _SERIALIZER.url("deleted_account_name", deleted_account_name, "str"), + "location": _SERIALIZER.url("location", location, "str"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_deleted_accounts_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/deletedAccounts" + path_format_arguments = { + "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_management_policies_get_request( + resource_group_name: str, + account_name: str, + management_policy_name: Union[str, _models.ManagementPolicyName], + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/managementPolicies/{managementPolicyName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "managementPolicyName": _SERIALIZER.url("management_policy_name", management_policy_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_management_policies_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, + account_name: str, + 
management_policy_name: Union[str, _models.ManagementPolicyName], + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/managementPolicies/{managementPolicyName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "managementPolicyName": _SERIALIZER.url("management_policy_name", management_policy_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_management_policies_delete_request( + resource_group_name: str, + account_name: str, + management_policy_name: Union[str, _models.ManagementPolicyName], + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/managementPolicies/{managementPolicyName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "managementPolicyName": _SERIALIZER.url("management_policy_name", management_policy_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_blob_inventory_policies_get_request( # pylint: disable=name-too-long + resource_group_name: str, + account_name: str, + blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies/{blobInventoryPolicyName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "blobInventoryPolicyName": _SERIALIZER.url("blob_inventory_policy_name", blob_inventory_policy_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # 
Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_blob_inventory_policies_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, + account_name: str, + blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies/{blobInventoryPolicyName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "blobInventoryPolicyName": _SERIALIZER.url("blob_inventory_policy_name", blob_inventory_policy_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + 
def build_blob_inventory_policies_delete_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    account_name: str,
    blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName],
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the DELETE request for a storage account's blob inventory policy."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))

    # Substitute the serialized path segments directly into the URL template.
    _url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies/{blobInventoryPolicyName}"
    ).format(
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        accountName=_SERIALIZER.url("account_name", account_name, "str"),
        blobInventoryPolicyName=_SERIALIZER.url("blob_inventory_policy_name", blob_inventory_policy_name, "str"),
    )

    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)


def build_blob_inventory_policies_list_request(  # pylint: disable=name-too-long
    resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists blob inventory policies of a storage account."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    _url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies"
    ).format(
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        accountName=_SERIALIZER.url("account_name", account_name, "str"),
    )

    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_private_endpoint_connections_get_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    account_name: str,
    private_endpoint_connection_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request for a single private endpoint connection."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    _url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}"
    ).format(
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        accountName=_SERIALIZER.url("account_name", account_name, "str"),
        privateEndpointConnectionName=_SERIALIZER.url(
            "private_endpoint_connection_name", private_endpoint_connection_name, "str"
        ),
    )

    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_private_endpoint_connections_put_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    account_name: str,
    private_endpoint_connection_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the PUT request that creates or updates a private endpoint connection."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    _url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}"
    ).format(
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        accountName=_SERIALIZER.url("account_name", account_name, "str"),
        privateEndpointConnectionName=_SERIALIZER.url(
            "private_endpoint_connection_name", private_endpoint_connection_name, "str"
        ),
    )

    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Only send Content-Type when the caller (or transport) provided one.
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_private_endpoint_connections_delete_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    account_name: str,
    private_endpoint_connection_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the DELETE request for a private endpoint connection."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))

    _url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}"
    ).format(
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        accountName=_SERIALIZER.url("account_name", account_name, "str"),
        privateEndpointConnectionName=_SERIALIZER.url(
            "private_endpoint_connection_name", private_endpoint_connection_name, "str"
        ),
    )

    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)


def build_private_endpoint_connections_list_request(  # pylint: disable=name-too-long
    resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists private endpoint connections of a storage account."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    _url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections"
    ).format(
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        accountName=_SERIALIZER.url("account_name", account_name, "str"),
    )

    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_encryption_scopes_get_request(
    resource_group_name: str, account_name: str, encryption_scope_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request for a single encryption scope."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    _url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}"
    ).format(
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        accountName=_SERIALIZER.url("account_name", account_name, "str"),
        encryptionScopeName=_SERIALIZER.url("encryption_scope_name", encryption_scope_name, "str"),
    )

    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_encryption_scopes_put_request(
    resource_group_name: str, account_name: str, encryption_scope_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the PUT request that creates or replaces an encryption scope."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    _url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}"
    ).format(
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        accountName=_SERIALIZER.url("account_name", account_name, "str"),
        encryptionScopeName=_SERIALIZER.url("encryption_scope_name", encryption_scope_name, "str"),
    )

    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_encryption_scopes_patch_request(
    resource_group_name: str, account_name: str, encryption_scope_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the PATCH request that updates an existing encryption scope."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    _url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}"
    ).format(
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        accountName=_SERIALIZER.url("account_name", account_name, "str"),
        encryptionScopeName=_SERIALIZER.url("encryption_scope_name", encryption_scope_name, "str"),
    )

    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)


def build_encryption_scopes_list_request(
    resource_group_name: str,
    account_name: str,
    subscription_id: str,
    *,
    maxpagesize: Optional[int] = None,
    filter: Optional[str] = None,
    include: Optional[Union[str, _models.ListEncryptionScopesInclude]] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists encryption scopes, with optional paging/filter options."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    _url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes"
    ).format(
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        accountName=_SERIALIZER.url("account_name", account_name, "str"),
    )

    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Optional OData-style query options are only emitted when supplied.
    if maxpagesize is not None:
        _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int")
    if filter is not None:
        _params["$filter"] = _SERIALIZER.query("filter", filter, "str")
    if include is not None:
        _params["$include"] = _SERIALIZER.query("include", include, "str")

    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_table_services_get_service_properties_request(  # pylint: disable=name-too-long
    resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request for the (singleton) table service properties of a storage account."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01"))
    accept = _headers.pop("Accept", "application/json")

    _url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default"
    ).format(
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        accountName=_SERIALIZER.url("account_name", account_name, "str"),
    )

    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)

+def build_table_services_set_service_properties_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_table_services_list_request( + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_network_security_perimeter_configurations_get_request( # pylint: disable=name-too-long + resource_group_name: str, + account_name: str, + network_security_perimeter_configuration_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/networkSecurityPerimeterConfigurations/{networkSecurityPerimeterConfigurationName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "networkSecurityPerimeterConfigurationName": _SERIALIZER.url( + "network_security_perimeter_configuration_name", network_security_perimeter_configuration_name, "str" + ), + 
} + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_network_security_perimeter_configurations_list_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/networkSecurityPerimeterConfigurations" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_network_security_perimeter_configurations_reconcile_request( # pylint: disable=name-too-long + resource_group_name: str, + account_name: str, + network_security_perimeter_configuration_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) 
or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/networkSecurityPerimeterConfigurations/{networkSecurityPerimeterConfigurationName}/reconcile" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "networkSecurityPerimeterConfigurationName": _SERIALIZER.url( + "network_security_perimeter_configuration_name", network_security_perimeter_configuration_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="POST", url=_url, params=_params, **kwargs) + + +def build_storage_task_assignments_get_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, storage_task_assignment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments/{storageTaskAssignmentName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", 
account_name, "str"), + "storageTaskAssignmentName": _SERIALIZER.url( + "storage_task_assignment_name", storage_task_assignment_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_task_assignments_create_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, storage_task_assignment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments/{storageTaskAssignmentName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "storageTaskAssignmentName": _SERIALIZER.url( + "storage_task_assignment_name", storage_task_assignment_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = 
_SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_task_assignments_update_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, storage_task_assignment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments/{storageTaskAssignmentName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "storageTaskAssignmentName": _SERIALIZER.url( + "storage_task_assignment_name", storage_task_assignment_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_task_assignments_delete_request( # pylint: disable=name-too-long + 
resource_group_name: str, account_name: str, storage_task_assignment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments/{storageTaskAssignmentName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "storageTaskAssignmentName": _SERIALIZER.url( + "storage_task_assignment_name", storage_task_assignment_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_storage_task_assignments_list_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, *, top: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", 
resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_task_assignments_stop_assignment_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, storage_task_assignment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments/{storageTaskAssignmentName}/stopAssignment" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "storageTaskAssignmentName": _SERIALIZER.url( + "storage_task_assignment_name", storage_task_assignment_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="POST", url=_url, params=_params, **kwargs) + + +def build_connectors_get_request( + resource_group_name: str, account_name: str, connector_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/connectors/{connectorName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "connectorName": _SERIALIZER.url("connector_name", connector_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_connectors_create_request( + resource_group_name: str, account_name: str, connector_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/connectors/{connectorName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + 
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "connectorName": _SERIALIZER.url("connector_name", connector_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_connectors_update_request( + resource_group_name: str, account_name: str, connector_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/connectors/{connectorName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "connectorName": _SERIALIZER.url("connector_name", connector_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if 
content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_connectors_delete_request( + resource_group_name: str, account_name: str, connector_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/connectors/{connectorName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "connectorName": _SERIALIZER.url("connector_name", connector_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_connectors_list_by_storage_account_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/connectors" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_connectors_test_existing_connection_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, connector_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/connectors/{connectorName}/testExistingConnection" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "connectorName": _SERIALIZER.url("connector_name", connector_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct 
parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_data_shares_get_request( + resource_group_name: str, account_name: str, data_share_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/dataShares/{dataShareName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "dataShareName": _SERIALIZER.url("data_share_name", data_share_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_data_shares_create_request( + resource_group_name: str, account_name: str, data_share_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/dataShares/{dataShareName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "dataShareName": _SERIALIZER.url("data_share_name", data_share_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_data_shares_update_request( + resource_group_name: str, account_name: str, data_share_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/dataShares/{dataShareName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "dataShareName": _SERIALIZER.url("data_share_name", data_share_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_data_shares_delete_request( + resource_group_name: str, account_name: str, data_share_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/dataShares/{dataShareName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "dataShareName": _SERIALIZER.url("data_share_name", data_share_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_data_shares_list_by_storage_account_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/dataShares" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_private_link_resources_list_by_storage_account_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateLinkResources" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_task_assignments_instances_report_list_request( # pylint: disable=name-too-long + resource_group_name: str, + account_name: str, + subscription_id: str, + *, + maxpagesize: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/reports" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if maxpagesize is not None: + 
_params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_queue_list_request( + resource_group_name: str, + account_name: str, + subscription_id: str, + *, + maxpagesize: Optional[str] = None, + filter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if maxpagesize is not None: + _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "str") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_queue_get_request( + resource_group_name: str, account_name: str, queue_name: str, subscription_id: str, **kwargs: Any 
+) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "queueName": _SERIALIZER.url("queue_name", queue_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_queue_create_request( + resource_group_name: str, account_name: str, queue_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}" + path_format_arguments = { + "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "queueName": _SERIALIZER.url("queue_name", queue_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_queue_update_request( + resource_group_name: str, account_name: str, queue_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "queueName": _SERIALIZER.url("queue_name", queue_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, 
"str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_queue_delete_request( + resource_group_name: str, account_name: str, queue_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "queueName": _SERIALIZER.url("queue_name", queue_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_object_replication_policies_get_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, object_replication_policy_id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "objectReplicationPolicyId": _SERIALIZER.url( + "object_replication_policy_id", object_replication_policy_id, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_object_replication_policies_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, object_replication_policy_id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": 
_SERIALIZER.url("account_name", account_name, "str"), + "objectReplicationPolicyId": _SERIALIZER.url( + "object_replication_policy_id", object_replication_policy_id, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_object_replication_policies_delete_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, object_replication_policy_id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "objectReplicationPolicyId": _SERIALIZER.url( + "object_replication_policy_id", object_replication_policy_id, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_object_replication_policies_list_request( # pylint: disable=name-too-long + 
resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_local_users_get_request( + resource_group_name: str, account_name: str, username: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/localUsers/{username}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 
"str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "username": _SERIALIZER.url("username", username, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_local_users_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, username: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/localUsers/{username}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "username": _SERIALIZER.url("username", username, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return 
HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_local_users_delete_request( + resource_group_name: str, account_name: str, username: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/localUsers/{username}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "username": _SERIALIZER.url("username", username, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_local_users_list_request( + resource_group_name: str, + account_name: str, + subscription_id: str, + *, + maxpagesize: Optional[int] = None, + filter: Optional[str] = None, + include: Optional[Union[str, _models.ListLocalUserIncludeParam]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/localUsers" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", 
subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if maxpagesize is not None: + _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if include is not None: + _params["$include"] = _SERIALIZER.query("include", include, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_local_users_list_keys_request( + resource_group_name: str, account_name: str, username: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/localUsers/{username}/listKeys" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "username": _SERIALIZER.url("username", username, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct 
headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_local_users_regenerate_password_request( # pylint: disable=name-too-long + resource_group_name: str, account_name: str, username: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/localUsers/{username}/regeneratePassword" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "username": _SERIALIZER.url("username", username, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_table_get_request( + resource_group_name: str, account_name: str, table_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url 
= "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "tableName": _SERIALIZER.url("table_name", table_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_table_create_request( + resource_group_name: str, account_name: str, table_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "tableName": _SERIALIZER.url("table_name", table_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore 
+ + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_table_update_request( + resource_group_name: str, account_name: str, table_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "tableName": _SERIALIZER.url("table_name", table_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_table_delete_request( + 
resource_group_name: str, account_name: str, table_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "tableName": _SERIALIZER.url("table_name", table_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_table_list_request( + resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: 
ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_task_assignment_instances_report_list_request( # pylint: disable=name-too-long + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + subscription_id: str, + *, + maxpagesize: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments/{storageTaskAssignmentName}/reports" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "accountName": _SERIALIZER.url("account_name", account_name, "str"), + "storageTaskAssignmentName": _SERIALIZER.url( + "storage_task_assignment_name", storage_task_assignment_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if maxpagesize is not None: + _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return 
HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_skus_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/skus" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_usages_list_by_location_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-08-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/locations/{location}/usages" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "location": _SERIALIZER.url("location", location, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -63,10 +3637,16285 @@ class Operations: Instead, you should access the following operations through 
:class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`operations` attribute. + :attr:`operations` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> ItemPaged["_models.Operation"]: + """Lists all of the available Storage Rest API operations. + + :return: An iterator like instance of Operation + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Operation]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_operations_list_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Operation], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class BlobContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`blob_containers` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any + ) -> _models.BlobContainer: + """Gets properties of a specified container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BlobContainer] = kwargs.pop("cls", None) + + _request = build_blob_containers_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobContainer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore 
+ + return deserialized # type: ignore + + @overload + def create( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: _models.BlobContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobContainer: + """Creates a new container under the specified account as described by request body. The container + resource includes metadata and properties for that container. It does not include a list of the + blobs contained by the container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties of the blob container to create. Required. + :type blob_container: ~azure.mgmt.storage.models.BlobContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobContainer: + """Creates a new container under the specified account as described by request body. The container + resource includes metadata and properties for that container. It does not include a list of the + blobs contained by the container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties of the blob container to create. Required. + :type blob_container: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobContainer: + """Creates a new container under the specified account as described by request body. The container + resource includes metadata and properties for that container. It does not include a list of the + blobs contained by the container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties of the blob container to create. Required. + :type blob_container: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: Union[_models.BlobContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.BlobContainer: + """Creates a new container under the specified account as described by request body. The container + resource includes metadata and properties for that container. It does not include a list of the + blobs contained by the container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties of the blob container to create. Is one of the following + types: BlobContainer, JSON, IO[bytes] Required. + :type blob_container: ~azure.mgmt.storage.models.BlobContainer or JSON or IO[bytes] + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BlobContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(blob_container, (IOBase, bytes)): + _content = blob_container + else: + _content = json.dumps(blob_container, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_containers_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobContainer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def update( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: _models.BlobContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobContainer: + """Updates container properties as specified in request body. Properties not mentioned in the + request will be unchanged. Update fails if the specified container doesn't already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties to update for the blob container. Required. + :type blob_container: ~azure.mgmt.storage.models.BlobContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobContainer: + """Updates container properties as specified in request body. Properties not mentioned in the + request will be unchanged. Update fails if the specified container doesn't already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties to update for the blob container. Required. + :type blob_container: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobContainer: + """Updates container properties as specified in request body. Properties not mentioned in the + request will be unchanged. Update fails if the specified container doesn't already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties to update for the blob container. Required. + :type blob_container: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: Union[_models.BlobContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.BlobContainer: + """Updates container properties as specified in request body. Properties not mentioned in the + request will be unchanged. Update fails if the specified container doesn't already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param blob_container: Properties to update for the blob container. Is one of the following + types: BlobContainer, JSON, IO[bytes] Required. + :type blob_container: ~azure.mgmt.storage.models.BlobContainer or JSON or IO[bytes] + :return: BlobContainer. 
The BlobContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BlobContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(blob_container, (IOBase, bytes)): + _content = blob_container + else: + _content = json.dumps(blob_container, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_containers_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobContainer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any + ) -> None: + """Deletes specified container under its account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. 
+ :type container_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_containers_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @overload + def set_legal_hold( + self, + resource_group_name: str, + account_name: str, + container_name: str, + legal_hold: _models.LegalHold, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LegalHold: + """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold + follows an append pattern and does not clear out the existing tags that are not specified in + the request. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param legal_hold: The LegalHold property that will be set to a blob container. Required. + :type legal_hold: ~azure.mgmt.storage.models.LegalHold + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: LegalHold. The LegalHold is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LegalHold + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_legal_hold( + self, + resource_group_name: str, + account_name: str, + container_name: str, + legal_hold: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LegalHold: + """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold + follows an append pattern and does not clear out the existing tags that are not specified in + the request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param legal_hold: The LegalHold property that will be set to a blob container. Required. + :type legal_hold: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: LegalHold. The LegalHold is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LegalHold + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_legal_hold( + self, + resource_group_name: str, + account_name: str, + container_name: str, + legal_hold: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LegalHold: + """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold + follows an append pattern and does not clear out the existing tags that are not specified in + the request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. 
+ :type container_name: str + :param legal_hold: The LegalHold property that will be set to a blob container. Required. + :type legal_hold: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: LegalHold. The LegalHold is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LegalHold + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def set_legal_hold( + self, + resource_group_name: str, + account_name: str, + container_name: str, + legal_hold: Union[_models.LegalHold, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.LegalHold: + """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold + follows an append pattern and does not clear out the existing tags that are not specified in + the request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param legal_hold: The LegalHold property that will be set to a blob container. Is one of the + following types: LegalHold, JSON, IO[bytes] Required. + :type legal_hold: ~azure.mgmt.storage.models.LegalHold or JSON or IO[bytes] + :return: LegalHold. 
The LegalHold is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LegalHold + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.LegalHold] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(legal_hold, (IOBase, bytes)): + _content = legal_hold + else: + _content = json.dumps(legal_hold, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_containers_set_legal_hold_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + 
error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LegalHold, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def clear_legal_hold( + self, + resource_group_name: str, + account_name: str, + container_name: str, + legal_hold: _models.LegalHold, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LegalHold: + """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent + operation. ClearLegalHold clears out only the specified tags in the request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param legal_hold: The LegalHold property that will be clear from a blob container. Required. + :type legal_hold: ~azure.mgmt.storage.models.LegalHold + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: LegalHold. 
The LegalHold is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LegalHold + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def clear_legal_hold( + self, + resource_group_name: str, + account_name: str, + container_name: str, + legal_hold: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LegalHold: + """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent + operation. ClearLegalHold clears out only the specified tags in the request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param legal_hold: The LegalHold property that will be clear from a blob container. Required. + :type legal_hold: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: LegalHold. 
The LegalHold is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LegalHold + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def clear_legal_hold( + self, + resource_group_name: str, + account_name: str, + container_name: str, + legal_hold: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LegalHold: + """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent + operation. ClearLegalHold clears out only the specified tags in the request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param legal_hold: The LegalHold property that will be clear from a blob container. Required. + :type legal_hold: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: LegalHold. 
The LegalHold is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LegalHold + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def clear_legal_hold( + self, + resource_group_name: str, + account_name: str, + container_name: str, + legal_hold: Union[_models.LegalHold, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.LegalHold: + """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent + operation. ClearLegalHold clears out only the specified tags in the request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param legal_hold: The LegalHold property that will be clear from a blob container. Is one of + the following types: LegalHold, JSON, IO[bytes] Required. + :type legal_hold: ~azure.mgmt.storage.models.LegalHold or JSON or IO[bytes] + :return: LegalHold. 
The LegalHold is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LegalHold + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.LegalHold] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(legal_hold, (IOBase, bytes)): + _content = legal_hold + else: + _content = json.dumps(legal_hold, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_containers_clear_legal_hold_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) 
+ error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LegalHold, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def lease( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[_models.LeaseContainerRequest] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LeaseContainerResponse: + """The Lease Container operation establishes and manages a lock on a container for delete + operations. The lock duration can be 15 to 60 seconds, or can be infinite. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The content of the action request. Default value is None. + :type parameters: ~azure.mgmt.storage.models.LeaseContainerRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: LeaseContainerResponse. 
The LeaseContainerResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def lease( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[JSON] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LeaseContainerResponse: + """The Lease Container operation establishes and manages a lock on a container for delete + operations. The lock duration can be 15 to 60 seconds, or can be infinite. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The content of the action request. Default value is None. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: LeaseContainerResponse. 
The LeaseContainerResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def lease( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[IO[bytes]] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LeaseContainerResponse: + """The Lease Container operation establishes and manages a lock on a container for delete + operations. The lock duration can be 15 to 60 seconds, or can be infinite. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The content of the action request. Default value is None. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: LeaseContainerResponse. 
The LeaseContainerResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def lease( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[Union[_models.LeaseContainerRequest, JSON, IO[bytes]]] = None, + **kwargs: Any + ) -> _models.LeaseContainerResponse: + """The Lease Container operation establishes and manages a lock on a container for delete + operations. The lock duration can be 15 to 60 seconds, or can be infinite. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The content of the action request. Is one of the following types: + LeaseContainerRequest, JSON, IO[bytes] Default value is None. + :type parameters: ~azure.mgmt.storage.models.LeaseContainerRequest or JSON or IO[bytes] + :return: LeaseContainerResponse. 
The LeaseContainerResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LeaseContainerResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if parameters else None + cls: ClsType[_models.LeaseContainerResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" if parameters else None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_blob_containers_lease_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and 
close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LeaseContainerResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _object_level_worm_initial( + self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_blob_containers_object_level_worm_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in 
memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_object_level_worm( + self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any + ) -> LROPoller[None]: + """This operation migrates a blob container from container level WORM to object level immutability + enabled container. Prerequisites require a container level immutability policy either in locked + or unlocked state, Account level versioning must be enabled and there should be no Legal hold + on the container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. 
+ :type container_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._object_level_worm_initial( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + account_name: str, + *, + filter: Optional[str] = None, + include: Optional[Union[str, _models.ListContainersInclude]] = None, + **kwargs: Any + ) -> 
ItemPaged["_models.ListContainerItem"]: + """Lists all containers and does not support a prefix like data plane. Also SRP today does not + return continuation token. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword filter: Optional. When specified, only container names starting with the filter will + be listed. Default value is None. + :paramtype filter: str + :keyword include: Optional, used to include the properties for soft deleted blob containers. + "deleted" Default value is None. + :paramtype include: str or ~azure.mgmt.storage.models.ListContainersInclude + :return: An iterator like instance of ListContainerItem + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.ListContainerItem] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.ListContainerItem]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_blob_containers_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + filter=filter, + include=include, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + 
"self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.ListContainerItem], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: 
str, + *, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Gets the existing immutability policy along with the corresponding ETag in response headers and + body. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. 
The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) + + _request = build_blob_containers_get_immutability_policy_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + 
_models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ImmutabilityPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def create_or_update_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[_models.ImmutabilityPolicy] = None, + *, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but + not required for this operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob + container. Default value is None. 
+ :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[JSON] = None, + *, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but + not required for this operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. 
+ :type container_name: str + :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob + container. Default value is None. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[IO[bytes]] = None, + *, + content_type: str = "application/json", + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but + not required for this operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. 
Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob + container. Default value is None. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[Union[_models.ImmutabilityPolicy, JSON, IO[bytes]]] = None, + *, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but + not required for this operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. 
+ Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob + container. Is one of the following types: ImmutabilityPolicy, JSON, IO[bytes] Default value is + None. + :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy or JSON or IO[bytes] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if parameters else None + cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" if parameters else None + _content = None + if isinstance(parameters, (IOBase, bytes)): + 
_content = parameters + else: + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_blob_containers_create_or_update_immutability_policy_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ImmutabilityPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def 
delete_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + *, + etag: str, + match_condition: MatchConditions, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Aborts an unlocked immutability policy. The response of delete has + immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this + operation. Deleting a locked immutability policy is not allowed, the only way is to delete the + container after deleting all expired blobs inside the policy locked container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Required. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Required. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. 
The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) + + _request = build_blob_containers_delete_immutability_policy_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + 
_models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ImmutabilityPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def lock_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + *, + etag: str, + match_condition: MatchConditions, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is + ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Required. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Required. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. 
The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) + + _request = build_blob_containers_lock_immutability_policy_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + subscription_id=self._config.subscription_id, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + 
_models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ImmutabilityPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def extend_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[_models.ImmutabilityPolicy] = None, + *, + etag: str, + match_condition: MatchConditions, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only + action allowed on a Locked policy will be this action. ETag in If-Match is required for this + operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The content of the action request. Default value is None. 
+ :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy + :keyword etag: check if resource is changed. Set None to skip checking etag. Required. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Required. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def extend_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[JSON] = None, + *, + etag: str, + match_condition: MatchConditions, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only + action allowed on a Locked policy will be this action. ETag in If-Match is required for this + operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The content of the action request. 
Default value is None. + :type parameters: JSON + :keyword etag: check if resource is changed. Set None to skip checking etag. Required. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Required. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def extend_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[IO[bytes]] = None, + *, + etag: str, + match_condition: MatchConditions, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only + action allowed on a Locked policy will be this action. ETag in If-Match is required for this + operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The content of the action request. Default value is None. 
+ :type parameters: IO[bytes] + :keyword etag: check if resource is changed. Set None to skip checking etag. Required. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Required. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def extend_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional[Union[_models.ImmutabilityPolicy, JSON, IO[bytes]]] = None, + *, + etag: str, + match_condition: MatchConditions, + **kwargs: Any + ) -> _models.ImmutabilityPolicy: + """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only + action allowed on a Locked policy will be this action. ETag in If-Match is required for this + operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. Required. + :type container_name: str + :param parameters: The content of the action request. 
Is one of the following types: + ImmutabilityPolicy, JSON, IO[bytes] Default value is None. + :type parameters: ~azure.mgmt.storage.models.ImmutabilityPolicy or JSON or IO[bytes] + :keyword etag: check if resource is changed. Set None to skip checking etag. Required. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Required. + :paramtype match_condition: ~azure.core.MatchConditions + :return: ImmutabilityPolicy. The ImmutabilityPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ImmutabilityPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if parameters else None + cls: ClsType[_models.ImmutabilityPolicy] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" if parameters else None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_blob_containers_extend_immutability_policy_request( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + 
subscription_id=self._config.subscription_id, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ImmutabilityPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + +class BlobServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`blob_services` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get_service_properties( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> _models.BlobServiceProperties: + """Gets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: BlobServiceProperties. 
The BlobServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BlobServiceProperties] = kwargs.pop("cls", None) + + _request = build_blob_services_get_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) 
# type: ignore + + return deserialized # type: ignore + + @overload + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: _models.BlobServiceProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobServiceProperties: + """Sets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Blob service, including properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Required. + :type parameters: ~azure.mgmt.storage.models.BlobServiceProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobServiceProperties. The BlobServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobServiceProperties: + """Sets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Blob service, including properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobServiceProperties. The BlobServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobServiceProperties: + """Sets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Blob service, including properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: BlobServiceProperties. The BlobServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.BlobServiceProperties, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.BlobServiceProperties: + """Sets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Blob service, including properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. Is one of the following + types: BlobServiceProperties, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.BlobServiceProperties or JSON or IO[bytes] + :return: BlobServiceProperties. 
The BlobServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BlobServiceProperties] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_services_set_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> ItemPaged["_models.BlobServiceProperties"]: + """List blob services of storage account. It returns a collection of one object named default. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. 
+ :type account_name: str + :return: An iterator like instance of BlobServiceProperties + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.BlobServiceProperties] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BlobServiceProperties]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_blob_services_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + 
list_of_elem = _deserialize( + List[_models.BlobServiceProperties], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class StorageAccountsOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`storage_accounts` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @overload + def check_name_availability( + self, + account_name: _models.StorageAccountCheckNameAvailabilityParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CheckNameAvailabilityResult: + """Checks that the storage account name is valid and is not already in use. 
+ + :param account_name: The request body. Required. + :type account_name: ~azure.mgmt.storage.models.StorageAccountCheckNameAvailabilityParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: CheckNameAvailabilityResult. The CheckNameAvailabilityResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def check_name_availability( + self, account_name: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.CheckNameAvailabilityResult: + """Checks that the storage account name is valid and is not already in use. + + :param account_name: The request body. Required. + :type account_name: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: CheckNameAvailabilityResult. The CheckNameAvailabilityResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def check_name_availability( + self, account_name: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.CheckNameAvailabilityResult: + """Checks that the storage account name is valid and is not already in use. + + :param account_name: The request body. Required. + :type account_name: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: CheckNameAvailabilityResult. 
The CheckNameAvailabilityResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def check_name_availability( + self, account_name: Union[_models.StorageAccountCheckNameAvailabilityParameters, JSON, IO[bytes]], **kwargs: Any + ) -> _models.CheckNameAvailabilityResult: + """Checks that the storage account name is valid and is not already in use. + + :param account_name: The request body. Is one of the following types: + StorageAccountCheckNameAvailabilityParameters, JSON, IO[bytes] Required. + :type account_name: ~azure.mgmt.storage.models.StorageAccountCheckNameAvailabilityParameters or + JSON or IO[bytes] + :return: CheckNameAvailabilityResult. The CheckNameAvailabilityResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CheckNameAvailabilityResult] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(account_name, (IOBase, bytes)): + _content = account_name + else: + _content = json.dumps(account_name, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_check_name_availability_request( + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + 
path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.CheckNameAvailabilityResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_properties( + self, + resource_group_name: str, + account_name: str, + *, + expand: Optional[Union[str, _models.StorageAccountExpand]] = None, + **kwargs: Any + ) -> _models.StorageAccount: + """Returns the properties for the specified storage account including but not limited to name, SKU + name, location, and account status. The ListKeys operation should be used to retrieve storage + keys. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword expand: May be used to expand the properties within account's properties. By default, + data is not included when fetching properties. Currently we only support geoReplicationStats + and blobRestoreStatus. Known values are: "geoReplicationStats" and "blobRestoreStatus". Default + value is None. + :paramtype expand: str or ~azure.mgmt.storage.models.StorageAccountExpand + :return: StorageAccount. The StorageAccount is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccount + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageAccount] = kwargs.pop("cls", None) + + _request = build_storage_accounts_get_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the 
socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAccount, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_initial( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.StorageAccountCreateParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create( + self, + resource_group_name: str, + account_name: str, + parameters: _models.StorageAccountCreateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAccount]: + """Asynchronously creates a new storage account with the specified parameters. If an account is + already created and a subsequent create request is issued with different properties, the + account properties will be updated. If an account is already created and a subsequent create or + update request is issued with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the created account. Required. + :type parameters: ~azure.mgmt.storage.models.StorageAccountCreateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAccount. The StorageAccount is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAccount]: + """Asynchronously creates a new storage account with the specified parameters. If an account is + already created and a subsequent create request is issued with different properties, the + account properties will be updated. If an account is already created and a subsequent create or + update request is issued with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the created account. Required. 
+ :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAccount. The StorageAccount is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAccount]: + """Asynchronously creates a new storage account with the specified parameters. If an account is + already created and a subsequent create request is issued with different properties, the + account properties will be updated. If an account is already created and a subsequent create or + update request is issued with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the created account. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAccount. 
The StorageAccount is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.StorageAccountCreateParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.StorageAccount]: + """Asynchronously creates a new storage account with the specified parameters. If an account is + already created and a subsequent create request is issued with different properties, the + account properties will be updated. If an account is already created and a subsequent create or + update request is issued with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the created account. Is one of the following + types: StorageAccountCreateParameters, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.StorageAccountCreateParameters or JSON or + IO[bytes] + :return: An instance of LROPoller that returns StorageAccount. 
The StorageAccount is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAccount] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_initial( + resource_group_name=resource_group_name, + account_name=account_name, + parameters=parameters, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageAccount, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.StorageAccount].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.StorageAccount]( + self._client, 
raw_result, get_long_running_output, polling_method # type: ignore + ) + + @overload + def update( + self, + resource_group_name: str, + account_name: str, + parameters: _models.StorageAccountUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageAccount: + """The update operation can be used to update the SKU, encryption, access tier, or tags for a + storage account. It can also be used to map the account to a custom domain. Only one custom + domain is supported per storage account; the replacement/change of custom domain is not + supported. In order to replace an old custom domain, the old value must be cleared/unregistered + before a new value can be set. The update of multiple properties is supported. This call does + not change the storage keys for the account. If you want to change the storage account keys, + use the regenerate keys operation. The location and name of the storage account cannot be + changed after creation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the updated account. Required. + :type parameters: ~azure.mgmt.storage.models.StorageAccountUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageAccount. 
The StorageAccount is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccount + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageAccount: + """The update operation can be used to update the SKU, encryption, access tier, or tags for a + storage account. It can also be used to map the account to a custom domain. Only one custom + domain is supported per storage account; the replacement/change of custom domain is not + supported. In order to replace an old custom domain, the old value must be cleared/unregistered + before a new value can be set. The update of multiple properties is supported. This call does + not change the storage keys for the account. If you want to change the storage account keys, + use the regenerate keys operation. The location and name of the storage account cannot be + changed after creation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the updated account. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageAccount. 
The StorageAccount is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccount + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageAccount: + """The update operation can be used to update the SKU, encryption, access tier, or tags for a + storage account. It can also be used to map the account to a custom domain. Only one custom + domain is supported per storage account; the replacement/change of custom domain is not + supported. In order to replace an old custom domain, the old value must be cleared/unregistered + before a new value can be set. The update of multiple properties is supported. This call does + not change the storage keys for the account. If you want to change the storage account keys, + use the regenerate keys operation. The location and name of the storage account cannot be + changed after creation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the updated account. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageAccount. 
The StorageAccount is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccount + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.StorageAccountUpdateParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.StorageAccount: + """The update operation can be used to update the SKU, encryption, access tier, or tags for a + storage account. It can also be used to map the account to a custom domain. Only one custom + domain is supported per storage account; the replacement/change of custom domain is not + supported. In order to replace an old custom domain, the old value must be cleared/unregistered + before a new value can be set. The update of multiple properties is supported. This call does + not change the storage keys for the account. If you want to change the storage account keys, + use the regenerate keys operation. The location and name of the storage account cannot be + changed after creation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for the updated account. Is one of the following + types: StorageAccountUpdateParameters, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.StorageAccountUpdateParameters or JSON or + IO[bytes] + :return: StorageAccount. 
The StorageAccount is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccount + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAccount] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
_failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAccount, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> None: + """Deletes a storage account in Microsoft Azure. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. 
+ :type account_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_storage_accounts_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.StorageAccount"]: + """Lists all the storage accounts available under the given resource group. Note that storage keys + are not returned; use the ListKeys operation for this. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :return: An iterator like instance of StorageAccount + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageAccount]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_accounts_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( 
+ List[_models.StorageAccount], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list(self, **kwargs: Any) -> ItemPaged["_models.StorageAccount"]: + """Lists all the storage accounts available under the subscription. Note that storage keys are not + returned; use the ListKeys operation for this. 
+ + :return: An iterator like instance of StorageAccount + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageAccount]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_accounts_list_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageAccount], + deserialized.get("value", []), + ) + if cls: + 
list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_keys( + self, resource_group_name: str, account_name: str, *, expand: Literal["kerb"] = "kerb", **kwargs: Any + ) -> _models.StorageAccountListKeysResult: + """Lists the access keys or Kerberos keys (if active directory enabled) for the specified storage + account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword expand: Specifies type of the key to be listed. Possible value is kerb. Known values + are "kerb" and None. Default value is "kerb". + :paramtype expand: str + :return: StorageAccountListKeysResult. 
The StorageAccountListKeysResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageAccountListKeysResult] = kwargs.pop("cls", None) + + _request = build_storage_accounts_list_keys_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAccountListKeysResult, response.json()) + + if cls: + return 
cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def regenerate_key( + self, + resource_group_name: str, + account_name: str, + regenerate_key: _models.StorageAccountRegenerateKeyParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageAccountListKeysResult: + """Regenerates one of the access keys or Kerberos keys for the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, + kerb1, kerb2. Required. + :type regenerate_key: ~azure.mgmt.storage.models.StorageAccountRegenerateKeyParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageAccountListKeysResult. The StorageAccountListKeysResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def regenerate_key( + self, + resource_group_name: str, + account_name: str, + regenerate_key: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageAccountListKeysResult: + """Regenerates one of the access keys or Kerberos keys for the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, + kerb1, kerb2. Required. + :type regenerate_key: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageAccountListKeysResult. The StorageAccountListKeysResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def regenerate_key( + self, + resource_group_name: str, + account_name: str, + regenerate_key: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageAccountListKeysResult: + """Regenerates one of the access keys or Kerberos keys for the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, + kerb1, kerb2. Required. + :type regenerate_key: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: StorageAccountListKeysResult. 
The StorageAccountListKeysResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def regenerate_key( + self, + resource_group_name: str, + account_name: str, + regenerate_key: Union[_models.StorageAccountRegenerateKeyParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.StorageAccountListKeysResult: + """Regenerates one of the access keys or Kerberos keys for the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, + kerb1, kerb2. Is one of the following types: StorageAccountRegenerateKeyParameters, JSON, + IO[bytes] Required. + :type regenerate_key: ~azure.mgmt.storage.models.StorageAccountRegenerateKeyParameters or JSON + or IO[bytes] + :return: StorageAccountListKeysResult. 
The StorageAccountListKeysResult is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAccountListKeysResult] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(regenerate_key, (IOBase, bytes)): + _content = regenerate_key + else: + _content = json.dumps(regenerate_key, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_regenerate_key_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAccountListKeysResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def list_account_sas( + self, + resource_group_name: str, + account_name: str, + parameters: _models.AccountSasParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ListAccountSasResponse: + """List SAS credentials of a storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list SAS credentials for the storage account. + Required. + :type parameters: ~azure.mgmt.storage.models.AccountSasParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ListAccountSasResponse. The ListAccountSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_account_sas( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ListAccountSasResponse: + """List SAS credentials of a storage account. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list SAS credentials for the storage account. + Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ListAccountSasResponse. The ListAccountSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_account_sas( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ListAccountSasResponse: + """List SAS credentials of a storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list SAS credentials for the storage account. + Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: ListAccountSasResponse. 
The ListAccountSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def list_account_sas( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.AccountSasParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.ListAccountSasResponse: + """List SAS credentials of a storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list SAS credentials for the storage account. + Is one of the following types: AccountSasParameters, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.AccountSasParameters or JSON or IO[bytes] + :return: ListAccountSasResponse. 
The ListAccountSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ListAccountSasResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_list_account_sas_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ListAccountSasResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def list_service_sas( + self, + resource_group_name: str, + account_name: str, + parameters: _models.ServiceSasParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ListServiceSasResponse: + """List service SAS credentials of a specific resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list service SAS credentials. Required. + :type parameters: ~azure.mgmt.storage.models.ServiceSasParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ListServiceSasResponse. The ListServiceSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_service_sas( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ListServiceSasResponse: + """List service SAS credentials of a specific resource. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list service SAS credentials. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ListServiceSasResponse. The ListServiceSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_service_sas( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ListServiceSasResponse: + """List service SAS credentials of a specific resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list service SAS credentials. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: ListServiceSasResponse. 
The ListServiceSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def list_service_sas( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.ServiceSasParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.ListServiceSasResponse: + """List service SAS credentials of a specific resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide to list service SAS credentials. Is one of the + following types: ServiceSasParameters, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.ServiceSasParameters or JSON or IO[bytes] + :return: ListServiceSasResponse. 
The ListServiceSasResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ListServiceSasResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_list_service_sas_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ListServiceSasResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _failover_initial( + self, + resource_group_name: str, + account_name: str, + *, + failover_type: Literal["Planned"] = "Planned", + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_accounts_failover_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + failover_type=failover_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_failover( + self, + resource_group_name: str, + account_name: str, + *, + failover_type: Literal["Planned"] = "Planned", + **kwargs: Any + ) -> LROPoller[None]: + """A failover request can be triggered for a storage account in the event a primary endpoint + becomes unavailable for any reason. The failover occurs from the storage account's primary + cluster to the secondary cluster for RA-GRS accounts. The secondary cluster will become primary + after failover and the account is converted to LRS. In the case of a Planned Failover, the + primary and secondary clusters are swapped after failover and the account remains + geo-replicated. Failover should continue to be used in the event of availability issues as + Planned failover is only available while the primary and secondary endpoints are available. The + primary use case of a Planned Failover is disaster recovery testing drills. This type of + failover is invoked by setting FailoverType parameter to 'Planned'. Learn more about the + failover options here- + `https://learn.microsoft.com/azure/storage/common/storage-disaster-recovery-guidance + `_. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword failover_type: The parameter is set to 'Planned' to indicate whether a Planned + failover is requested. Known values are "Planned" and None. Default value is "Planned". + :paramtype failover_type: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._failover_initial( + resource_group_name=resource_group_name, + account_name=account_name, + failover_type=failover_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + 
polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + def _hierarchical_namespace_migration_initial( # pylint: disable=name-too-long + self, resource_group_name: str, account_name: str, *, request_type: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_accounts_hierarchical_namespace_migration_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + request_type=request_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_hierarchical_namespace_migration( + self, resource_group_name: str, account_name: str, *, request_type: str, **kwargs: Any + ) -> LROPoller[None]: + """Live Migration of storage account to enable Hns. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword request_type: Hierarchical namespace migration type can either be a + hierarchical namespace validation request 'HnsOnValidationRequest' or a hydration request + 'HnsOnHydrationRequest'. The validation request will validate the migration whereas the + hydration request will migrate the account. Required. 
+ :paramtype request_type: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._hierarchical_namespace_migration_initial( + resource_group_name=resource_group_name, + account_name=account_name, + request_type=request_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + def _abort_hierarchical_namespace_migration_initial( # pylint: disable=name-too-long + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = 
{ + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_accounts_abort_hierarchical_namespace_migration_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + 
    @distributed_trace
    def begin_abort_hierarchical_namespace_migration(  # pylint: disable=name-too-long
        self, resource_group_name: str, account_name: str, **kwargs: Any
    ) -> LROPoller[None]:
        """Abort live Migration of storage account to enable Hns.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :return: An instance of LROPoller that returns None
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)
        polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            # No continuation token: start the operation now; the identity `cls`
            # hands back the raw PipelineResponse for the poller to resume from.
            raw_result = self._abort_hierarchical_namespace_migration_initial(
                resource_group_name=resource_group_name,
                account_name=account_name,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
            # Drain the streamed body so the connection is released before polling.
            raw_result.http_response.read()  # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            # Final-state callback: operation returns no body.
            if cls:
                return cls(pipeline_response, None, {})  # type: ignore

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: PollingMethod = cast(
                PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller[None].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

    def _customer_initiated_migration_initial(
        self,
        resource_group_name: str,
        account_name: str,
        parameters: Union[_models.StorageAccountMigration, JSON, IO[bytes]],
        **kwargs: Any
    ) -> Iterator[bytes]:
        # Initial request of the customer-initiated-migration LRO. Accepts the
        # body as a model, plain JSON dict, or raw bytes/stream and serializes
        # accordingly; returns the streamed response for the begin_* wrapper.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        if isinstance(parameters, (IOBase, bytes)):
            # Raw payload: pass through untouched.
            _content = parameters
        else:
            # Model or JSON dict: serialize, dropping read-only properties.
            _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_storage_accounts_customer_initiated_migration_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = True
        pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_customer_initiated_migration( + self, + resource_group_name: str, + account_name: str, + parameters: _models.StorageAccountMigration, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Account Migration request can be triggered for a storage account to change its redundancy + level. The migration updates the non-zonal redundant storage account to a zonal redundant + account or vice-versa in order to have better reliability and availability. Zone-redundant + storage (ZRS) replicates your storage account synchronously across three Azure availability + zones in the primary region. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The request parameters required to perform storage account migration. + Required. + :type parameters: ~azure.mgmt.storage.models.StorageAccountMigration + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_customer_initiated_migration( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Account Migration request can be triggered for a storage account to change its redundancy + level. The migration updates the non-zonal redundant storage account to a zonal redundant + account or vice-versa in order to have better reliability and availability. Zone-redundant + storage (ZRS) replicates your storage account synchronously across three Azure availability + zones in the primary region. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The request parameters required to perform storage account migration. + Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_customer_initiated_migration( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Account Migration request can be triggered for a storage account to change its redundancy + level. The migration updates the non-zonal redundant storage account to a zonal redundant + account or vice-versa in order to have better reliability and availability. Zone-redundant + storage (ZRS) replicates your storage account synchronously across three Azure availability + zones in the primary region. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The request parameters required to perform storage account migration. + Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_customer_initiated_migration( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.StorageAccountMigration, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[None]: + """Account Migration request can be triggered for a storage account to change its redundancy + level. 
The migration updates the non-zonal redundant storage account to a zonal redundant + account or vice-versa in order to have better reliability and availability. Zone-redundant + storage (ZRS) replicates your storage account synchronously across three Azure availability + zones in the primary region. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The request parameters required to perform storage account migration. Is one + of the following types: StorageAccountMigration, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.StorageAccountMigration or JSON or IO[bytes] + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._customer_initiated_migration_initial( + resource_group_name=resource_group_name, + account_name=account_name, + parameters=parameters, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): 
# pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + def _restore_blob_ranges_initial( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.BlobRestoreParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_accounts_restore_blob_ranges_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + 
api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_restore_blob_ranges( + self, + resource_group_name: str, + account_name: str, + parameters: _models.BlobRestoreParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BlobRestoreStatus]: + """Restore blobs in the specified blob ranges. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for restore blob ranges. Required. + :type parameters: ~azure.mgmt.storage.models.BlobRestoreParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BlobRestoreStatus. The BlobRestoreStatus is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_restore_blob_ranges( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BlobRestoreStatus]: + """Restore blobs in the specified blob ranges. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for restore blob ranges. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BlobRestoreStatus. 
The BlobRestoreStatus is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_restore_blob_ranges( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BlobRestoreStatus]: + """Restore blobs in the specified blob ranges. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for restore blob ranges. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BlobRestoreStatus. The BlobRestoreStatus is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_restore_blob_ranges( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.BlobRestoreParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.BlobRestoreStatus]: + """Restore blobs in the specified blob ranges. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The parameters to provide for restore blob ranges. Is one of the following + types: BlobRestoreParameters, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.BlobRestoreParameters or JSON or IO[bytes] + :return: An instance of LROPoller that returns BlobRestoreStatus. The BlobRestoreStatus is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BlobRestoreStatus] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._restore_blob_ranges_initial( + resource_group_name=resource_group_name, + account_name=account_name, + parameters=parameters, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.BlobRestoreStatus, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + 
                PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            # Resume a previously started restore from its continuation token.
            return LROPoller[_models.BlobRestoreStatus].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller[_models.BlobRestoreStatus](
            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
        )

    @distributed_trace
    def revoke_user_delegation_keys(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, account_name: str, **kwargs: Any
    ) -> None:
        """Revoke user delegation keys.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_storage_accounts_revoke_user_delegation_keys_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # Synchronous (non-LRO) call with no response body: no streaming needed.
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})  # type: ignore

    @distributed_trace
    def get_customer_initiated_migration(
        self,
        resource_group_name: str,
        account_name: str,
        migration_name: Union[str, _models.MigrationName],
        **kwargs: Any
    ) -> _models.StorageAccountMigration:
        """Gets the status of the ongoing migration for the specified storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param migration_name: The name of the Storage Account Migration. It should always be
         'default'. "default" Required.
        :type migration_name: str or ~azure.mgmt.storage.models.MigrationName
        :return: StorageAccountMigration. The StorageAccountMigration is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.StorageAccountMigration
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.StorageAccountMigration] = kwargs.pop("cls", None)

        _request = build_storage_accounts_get_customer_initiated_migration_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            migration_name=migration_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        # Caller may opt in to streaming the raw body instead of deserializing.
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.StorageAccountMigration, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore


class FileSharesOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.mgmt.storage.StorageManagementClient`'s
    :attr:`file_shares` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # Wired up by the generated client: receives the shared pipeline client,
        # configuration, and (de)serializers positionally or by keyword.
        input_args = list(args)
        self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def get(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        *,
        expand: Optional[str] = None,
        x_ms_snapshot: Optional[str] = None,
        **kwargs: Any
    ) -> _models.FileShare:
        """Gets properties of a specified share.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account. File share
         names must be between 3 and 63 characters in length and use numbers, lower-case letters and
         dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter
         or number. Required.
        :type share_name: str
        :keyword expand: Optional, used to expand the properties within share's properties. Valid
         values are: stats. Should be passed as a string with delimiter ','. Default value is None.
        :paramtype expand: str
        :keyword x_ms_snapshot: Optional, used to retrieve properties of a snapshot. Default value is
         None.
        :paramtype x_ms_snapshot: str
        :return: FileShare. The FileShare is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.FileShare
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.FileShare] = kwargs.pop("cls", None)

        _request = build_file_shares_get_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            share_name=share_name,
            subscription_id=self._config.subscription_id,
            expand=expand,
            x_ms_snapshot=x_ms_snapshot,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # NOTE: file-share operations deserialize failures as CloudError,
            # unlike the storage-account operations above which use ErrorResponse.
            error = _failsafe_deserialize(
                _models.CloudError,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.FileShare, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore

    @overload
    def create(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        file_share: _models.FileShare,
        *,
        expand: Optional[str] = None,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.FileShare:
        """Creates a new share under the specified account as described by request body. The share
        resource includes metadata and properties for that share. It does not include a list of the
        files contained by the share.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account. File share
         names must be between 3 and 63 characters in length and use numbers, lower-case letters and
         dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter
         or number. Required.
+ :type share_name: str + :param file_share: Properties of the file share to create. Required. + :type file_share: ~azure.mgmt.storage.models.FileShare + :keyword expand: Optional, used to expand the properties within share's properties. Valid + values are: snapshots. Should be passed as a string with delimiter ','. Default value is None. + :paramtype expand: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShare. The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: JSON, + *, + expand: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileShare: + """Creates a new share under the specified account as described by request body. The share + resource includes metadata and properties for that share. It does not include a list of the + files contained by the share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param file_share: Properties of the file share to create. Required. 
+ :type file_share: JSON + :keyword expand: Optional, used to expand the properties within share's properties. Valid + values are: snapshots. Should be passed as a string with delimiter ','. Default value is None. + :paramtype expand: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShare. The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: IO[bytes], + *, + expand: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileShare: + """Creates a new share under the specified account as described by request body. The share + resource includes metadata and properties for that share. It does not include a list of the + files contained by the share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param file_share: Properties of the file share to create. Required. + :type file_share: IO[bytes] + :keyword expand: Optional, used to expand the properties within share's properties. Valid + values are: snapshots. 
Should be passed as a string with delimiter ','. Default value is None. + :paramtype expand: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShare. The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: Union[_models.FileShare, JSON, IO[bytes]], + *, + expand: Optional[str] = None, + **kwargs: Any + ) -> _models.FileShare: + """Creates a new share under the specified account as described by request body. The share + resource includes metadata and properties for that share. It does not include a list of the + files contained by the share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param file_share: Properties of the file share to create. Is one of the following types: + FileShare, JSON, IO[bytes] Required. + :type file_share: ~azure.mgmt.storage.models.FileShare or JSON or IO[bytes] + :keyword expand: Optional, used to expand the properties within share's properties. Valid + values are: snapshots. Should be passed as a string with delimiter ','. 
Default value is None. + :paramtype expand: str + :return: FileShare. The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(file_share, (IOBase, bytes)): + _content = file_share + else: + _content = json.dumps(file_share, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_shares_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + share_name=share_name, + subscription_id=self._config.subscription_id, + expand=expand, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShare, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def update( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: _models.FileShare, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileShare: + """Updates share properties as specified in request body. Properties not mentioned in the request + will not be changed. Update fails if the specified share does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param file_share: Properties to update for the file share. Required. + :type file_share: ~azure.mgmt.storage.models.FileShare + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShare. 
    @overload
    def update(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        file_share: _models.FileShare,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.FileShare:
        """Updates share properties as specified in request body. Properties not mentioned in the
        request will not be changed. Update fails if the specified share does not already exist.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account. File
         share names must be between 3 and 63 characters in length and use numbers, lower-case
         letters and dash (-) only. Every dash (-) character must be immediately preceded and
         followed by a letter or number. Required.
        :type share_name: str
        :param file_share: Properties to update for the file share. Required.
        :type file_share: ~azure.mgmt.storage.models.FileShare
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: FileShare. The FileShare is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.FileShare
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def update(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        file_share: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.FileShare:
        """Updates share properties as specified in request body.

        Same contract as the ``FileShare`` overload; here the body is supplied as a raw JSON
        mapping.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account.
         Required.
        :type share_name: str
        :param file_share: Properties to update for the file share. Required.
        :type file_share: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: FileShare. The FileShare is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.FileShare
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def update(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        file_share: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.FileShare:
        """Updates share properties as specified in request body.

        Same contract as the ``FileShare`` overload; here the body is supplied as a binary stream
        that is sent as-is.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account.
         Required.
        :type share_name: str
        :param file_share: Properties to update for the file share. Required.
        :type file_share: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: FileShare. The FileShare is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.FileShare
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace
    def update(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        file_share: Union[_models.FileShare, JSON, IO[bytes]],
        **kwargs: Any
    ) -> _models.FileShare:
        """Updates share properties as specified in request body. Properties not mentioned in the
        request will not be changed. Update fails if the specified share does not already exist.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account.
         Required.
        :type share_name: str
        :param file_share: Properties to update for the file share. Is one of the following types:
         FileShare, JSON, IO[bytes] Required.
        :type file_share: ~azure.mgmt.storage.models.FileShare or JSON or IO[bytes]
        :return: FileShare. The FileShare is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.FileShare
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Default status-code -> exception mapping; callers may override via "error_map".
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        # case_insensitive_dict so the Content-Type pop below matches any casing.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.FileShare] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        if isinstance(file_share, (IOBase, bytes)):
            # Binary payloads are forwarded untouched.
            _content = file_share
        else:
            # Models/JSON are serialized; read-only properties are stripped from the body.
            _content = json.dumps(file_share, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_file_shares_update_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            share_name=share_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # Pop before pipeline.run so these are not forwarded as pipeline kwargs.
        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.CloudError,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            # Streaming callers get raw byte iterators instead of a deserialized model.
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.FileShare, response.json())

        if cls:
            # "cls" is the caller-supplied response hook; it receives the raw pipeline response.
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore
File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :keyword x_ms_snapshot: Optional, used to delete a snapshot. Default value is None. + :paramtype x_ms_snapshot: str + :keyword include: Optional. Valid values are: snapshots, leased-snapshots, none. The default + value is snapshots. For 'snapshots', the file share is deleted including all of its file share + snapshots. If the file share contains leased-snapshots, the deletion fails. For + 'leased-snapshots', the file share is deleted included all of its file share snapshots + (leased/unleased). For 'none', the file share is deleted if it has no share snapshots. If the + file share contains any snapshots (leased or unleased), the deletion fails. Default value is + None. + :paramtype include: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_file_shares_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + share_name=share_name, + subscription_id=self._config.subscription_id, + x_ms_snapshot=x_ms_snapshot, + include=include, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse 
= self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @overload + def restore( + self, + resource_group_name: str, + account_name: str, + share_name: str, + deleted_share: _models.DeletedShare, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Restore a file share within a valid retention days if share soft delete is enabled. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. Required. + :type share_name: str + :param deleted_share: Required. + :type deleted_share: ~azure.mgmt.storage.models.DeletedShare + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
    @overload
    def restore(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        deleted_share: _models.DeletedShare,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> None:
        """Restore a file share within a valid retention days if share soft delete is enabled.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account.
         Required.
        :type share_name: str
        :param deleted_share: The deleted share to be restored. Required.
        :type deleted_share: ~azure.mgmt.storage.models.DeletedShare
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def restore(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        deleted_share: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> None:
        """Restore a file share within a valid retention days if share soft delete is enabled.

        Same contract as the ``DeletedShare`` overload; here the body is supplied as a raw JSON
        mapping.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account.
         Required.
        :type share_name: str
        :param deleted_share: The deleted share to be restored. Required.
        :type deleted_share: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def restore(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        deleted_share: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> None:
        """Restore a file share within a valid retention days if share soft delete is enabled.

        Same contract as the ``DeletedShare`` overload; here the body is supplied as a binary
        stream that is sent as-is.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account.
         Required.
        :type share_name: str
        :param deleted_share: The deleted share to be restored. Required.
        :type deleted_share: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary
         body. Default value is "application/json".
        :paramtype content_type: str
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace
    def restore(  # pylint: disable=inconsistent-return-statements
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        deleted_share: Union[_models.DeletedShare, JSON, IO[bytes]],
        **kwargs: Any
    ) -> None:
        """Restore a file share within a valid retention days if share soft delete is enabled.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account.
         Required.
        :type share_name: str
        :param deleted_share: Is one of the following types: DeletedShare, JSON, IO[bytes]
         Required.
        :type deleted_share: ~azure.mgmt.storage.models.DeletedShare or JSON or IO[bytes]
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Default status-code -> exception mapping; callers may override via "error_map".
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        # case_insensitive_dict so the Content-Type pop below matches any casing.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[None] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        if isinstance(deleted_share, (IOBase, bytes)):
            # Binary payloads are forwarded untouched.
            _content = deleted_share
        else:
            # Models/JSON are serialized; read-only properties are stripped from the body.
            _content = json.dumps(deleted_share, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_file_shares_restore_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            share_name=share_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # Restore returns no payload, so the response is never streamed.
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.CloudError,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            # "cls" is the caller-supplied response hook; restore has no deserialized payload.
            return cls(pipeline_response, None, {})  # type: ignore
    @overload
    def lease(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        parameters: Optional[_models.LeaseShareRequest] = None,
        *,
        x_ms_snapshot: Optional[str] = None,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.LeaseShareResponse:
        """The Lease Share operation establishes and manages a lock on a share for delete
        operations. The lock duration can be 15 to 60 seconds, or can be infinite.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account.
         Required.
        :type share_name: str
        :param parameters: The content of the action request. Default value is None.
        :type parameters: ~azure.mgmt.storage.models.LeaseShareRequest
        :keyword x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default
         value is None.
        :paramtype x_ms_snapshot: str
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: LeaseShareResponse. The LeaseShareResponse is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LeaseShareResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def lease(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        parameters: Optional[JSON] = None,
        *,
        x_ms_snapshot: Optional[str] = None,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.LeaseShareResponse:
        """The Lease Share operation establishes and manages a lock on a share for delete
        operations. The lock duration can be 15 to 60 seconds, or can be infinite.

        Same contract as the ``LeaseShareRequest`` overload; here the body is supplied as a raw
        JSON mapping.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account.
         Required.
        :type share_name: str
        :param parameters: The content of the action request. Default value is None.
        :type parameters: JSON
        :keyword x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default
         value is None.
        :paramtype x_ms_snapshot: str
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: LeaseShareResponse. The LeaseShareResponse is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LeaseShareResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def lease(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        parameters: Optional[IO[bytes]] = None,
        *,
        x_ms_snapshot: Optional[str] = None,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.LeaseShareResponse:
        """The Lease Share operation establishes and manages a lock on a share for delete
        operations. The lock duration can be 15 to 60 seconds, or can be infinite.

        Same contract as the ``LeaseShareRequest`` overload; here the body is supplied as a binary
        stream that is sent as-is.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Required.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account.
         Required.
        :type share_name: str
        :param parameters: The content of the action request. Default value is None.
        :type parameters: IO[bytes]
        :keyword x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. Default
         value is None.
        :paramtype x_ms_snapshot: str
        :keyword content_type: Body Parameter content-type. Content type parameter for binary
         body. Default value is "application/json".
        :paramtype content_type: str
        :return: LeaseShareResponse. The LeaseShareResponse is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.LeaseShareResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """
The LeaseShareResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LeaseShareResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if parameters else None + cls: ClsType[_models.LeaseShareResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" if parameters else None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_file_shares_lease_request( + resource_group_name=resource_group_name, + account_name=account_name, + share_name=share_name, + subscription_id=self._config.subscription_id, + x_ms_snapshot=x_ms_snapshot, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in 
memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LeaseShareResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + account_name: str, + *, + filter: Optional[str] = None, + expand: Optional[str] = None, + **kwargs: Any + ) -> ItemPaged["_models.FileShareItem"]: + """Lists all shares. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword filter: Optional. When specified, only share names starting with the filter will be + listed. Default value is None. + :paramtype filter: str + :keyword expand: Optional, used to expand the properties within share's properties. Valid + values are: deleted, snapshots. Should be passed as a string with delimiter ','. Default value + is None. 
+ :paramtype expand: str + :return: An iterator like instance of FileShareItem + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.FileShareItem] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.FileShareItem]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_file_shares_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + filter=filter, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def 
extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.FileShareItem], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class FileServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`file_services` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get_service_properties( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> _models.FileServiceProperties: + """Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: FileServiceProperties. The FileServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileServiceProperties] = kwargs.pop("cls", None) + + _request = build_file_services_get_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: _models.FileServiceProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileServiceProperties: + """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of file services in storage accounts, including CORS + (Cross-Origin Resource Sharing) rules. Required. + :type parameters: ~azure.mgmt.storage.models.FileServiceProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileServiceProperties. 
The FileServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileServiceProperties: + """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of file services in storage accounts, including CORS + (Cross-Origin Resource Sharing) rules. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileServiceProperties. The FileServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileServiceProperties: + """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of file services in storage accounts, including CORS + (Cross-Origin Resource Sharing) rules. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: FileServiceProperties. The FileServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.FileServiceProperties, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.FileServiceProperties: + """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of file services in storage accounts, including CORS + (Cross-Origin Resource Sharing) rules. Is one of the following types: FileServiceProperties, + JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.FileServiceProperties or JSON or IO[bytes] + :return: FileServiceProperties. 
The FileServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileServiceProperties] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_services_set_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.FileServiceItems: + """List all file services in storage accounts. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: FileServiceItems. 
The FileServiceItems is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceItems + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileServiceItems] = kwargs.pop("cls", None) + + _request = build_file_services_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileServiceItems, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # 
type: ignore + + @distributed_trace + def get_service_usage(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.FileServiceUsage: + """Gets the usage of file service in storage account including account limits, file share limits + and constants used in recommendations and bursting formula. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: FileServiceUsage. The FileServiceUsage is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.FileServiceUsage + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileServiceUsage] = kwargs.pop("cls", None) + + _request = build_file_services_get_service_usage_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response 
= pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileServiceUsage, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_service_usages( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> ItemPaged["_models.FileServiceUsage"]: + """Gets the usages of file service in storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. 
+ :type account_name: str + :return: An iterator like instance of FileServiceUsage + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.FileServiceUsage] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.FileServiceUsage]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_file_services_list_service_usages_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def 
extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.FileServiceUsage], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class QueueServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`queue_services` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get_service_properties( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> _models.QueueServiceProperties: + """Gets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: QueueServiceProperties. 
The QueueServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.QueueServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QueueServiceProperties] = kwargs.pop("cls", None) + + _request = build_queue_services_get_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.QueueServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, 
{}) # type: ignore + + return deserialized # type: ignore + + @overload + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: _models.QueueServiceProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueueServiceProperties: + """Sets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Queue service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. + :type parameters: ~azure.mgmt.storage.models.QueueServiceProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QueueServiceProperties. The QueueServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.QueueServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueueServiceProperties: + """Sets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Queue service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QueueServiceProperties. The QueueServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.QueueServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueueServiceProperties: + """Sets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Queue service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: QueueServiceProperties. The QueueServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.QueueServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.QueueServiceProperties, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.QueueServiceProperties: + """Sets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Queue service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Is one of + the following types: QueueServiceProperties, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.QueueServiceProperties or JSON or IO[bytes] + :return: QueueServiceProperties. 
The QueueServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.QueueServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.QueueServiceProperties] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_queue_services_set_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.QueueServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.ListQueueServices: + """List all queue services for the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: ListQueueServices. 
The ListQueueServices is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListQueueServices + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ListQueueServices] = kwargs.pop("cls", None) + + _request = build_queue_services_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ListQueueServices, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return 
deserialized # type: ignore + + +class DeletedAccountsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`deleted_accounts` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, deleted_account_name: str, location: str, **kwargs: Any) -> _models.DeletedAccount: + """Get properties of specified deleted account resource. + + :param deleted_account_name: Name of the deleted storage account. Required. + :type deleted_account_name: str + :param location: The name of the Azure region. Required. + :type location: str + :return: DeletedAccount. 
The DeletedAccount is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.DeletedAccount + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.DeletedAccount] = kwargs.pop("cls", None) + + _request = build_deleted_accounts_get_request( + deleted_account_name=deleted_account_name, + location=location, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.DeletedAccount, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: 
ignore + + @distributed_trace + def list(self, **kwargs: Any) -> ItemPaged["_models.DeletedAccount"]: + """Lists deleted accounts under the subscription. + + :return: An iterator like instance of DeletedAccount + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.DeletedAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.DeletedAccount]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_deleted_accounts_list_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.DeletedAccount], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class ManagementPoliciesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`management_policies` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, + resource_group_name: str, + account_name: str, + management_policy_name: Union[str, _models.ManagementPolicyName], + **kwargs: Any + ) -> _models.ManagementPolicy: + """Gets the managementpolicy associated with the specified storage account. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. "default" Required. + :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName + :return: ManagementPolicy. The ManagementPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ManagementPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ManagementPolicy] = kwargs.pop("cls", None) + + _request = build_management_policies_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + management_policy_name=management_policy_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ManagementPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + account_name: str, + management_policy_name: Union[str, _models.ManagementPolicyName], + properties: _models.ManagementPolicy, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ManagementPolicy: + """Sets the managementpolicy to the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. "default" Required. + :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName + :param properties: The ManagementPolicy set to a storage account. Required. + :type properties: ~azure.mgmt.storage.models.ManagementPolicy + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: ManagementPolicy. The ManagementPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ManagementPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + account_name: str, + management_policy_name: Union[str, _models.ManagementPolicyName], + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ManagementPolicy: + """Sets the managementpolicy to the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. "default" Required. + :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName + :param properties: The ManagementPolicy set to a storage account. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ManagementPolicy. The ManagementPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ManagementPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + account_name: str, + management_policy_name: Union[str, _models.ManagementPolicyName], + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ManagementPolicy: + """Sets the managementpolicy to the specified storage account. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. "default" Required. + :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName + :param properties: The ManagementPolicy set to a storage account. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: ManagementPolicy. The ManagementPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ManagementPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + account_name: str, + management_policy_name: Union[str, _models.ManagementPolicyName], + properties: Union[_models.ManagementPolicy, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.ManagementPolicy: + """Sets the managementpolicy to the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. "default" Required. 
+ :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName + :param properties: The ManagementPolicy set to a storage account. Is one of the following + types: ManagementPolicy, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.storage.models.ManagementPolicy or JSON or IO[bytes] + :return: ManagementPolicy. The ManagementPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ManagementPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ManagementPolicy] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_management_policies_create_or_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + management_policy_name=management_policy_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # 
pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ManagementPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + account_name: str, + management_policy_name: Union[str, _models.ManagementPolicyName], + **kwargs: Any + ) -> None: + """Deletes the managementpolicy associated with the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. "default" Required. 
+ :type management_policy_name: str or ~azure.mgmt.storage.models.ManagementPolicyName + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_management_policies_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + management_policy_name=management_policy_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + +class BlobInventoryPoliciesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`blob_inventory_policies` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, + resource_group_name: str, + account_name: str, + blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], + **kwargs: Any + ) -> _models.BlobInventoryPolicy: + """Gets the blob inventory policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. "default" Required. + :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName + :return: BlobInventoryPolicy. 
The BlobInventoryPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BlobInventoryPolicy] = kwargs.pop("cls", None) + + _request = build_blob_inventory_policies_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + blob_inventory_policy_name=blob_inventory_policy_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobInventoryPolicy, response.json()) + + if cls: + return 
cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + account_name: str, + blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], + properties: _models.BlobInventoryPolicy, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobInventoryPolicy: + """Sets the blob inventory policy to the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. "default" Required. + :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName + :param properties: The blob inventory policy set to a storage account. Required. + :type properties: ~azure.mgmt.storage.models.BlobInventoryPolicy + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobInventoryPolicy. The BlobInventoryPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + account_name: str, + blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobInventoryPolicy: + """Sets the blob inventory policy to the specified storage account. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. "default" Required. + :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName + :param properties: The blob inventory policy set to a storage account. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobInventoryPolicy. The BlobInventoryPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + account_name: str, + blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.BlobInventoryPolicy: + """Sets the blob inventory policy to the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. "default" Required. 
+ :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName + :param properties: The blob inventory policy set to a storage account. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: BlobInventoryPolicy. The BlobInventoryPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + account_name: str, + blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], + properties: Union[_models.BlobInventoryPolicy, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.BlobInventoryPolicy: + """Sets the blob inventory policy to the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. "default" Required. + :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName + :param properties: The blob inventory policy set to a storage account. Is one of the following + types: BlobInventoryPolicy, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.storage.models.BlobInventoryPolicy or JSON or IO[bytes] + :return: BlobInventoryPolicy. 
The BlobInventoryPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.BlobInventoryPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BlobInventoryPolicy] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_blob_inventory_policies_create_or_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + blob_inventory_policy_name=blob_inventory_policy_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BlobInventoryPolicy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + account_name: str, + blob_inventory_policy_name: Union[str, _models.BlobInventoryPolicyName], + **kwargs: Any + ) -> None: + """Deletes the blob inventory policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. "default" Required. 
+ :type blob_inventory_policy_name: str or ~azure.mgmt.storage.models.BlobInventoryPolicyName + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_blob_inventory_policies_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + blob_inventory_policy_name=blob_inventory_policy_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> ItemPaged["_models.BlobInventoryPolicy"]: + """Gets the blob inventory policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An iterator like instance of BlobInventoryPolicy + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.BlobInventoryPolicy] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BlobInventoryPolicy]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_blob_inventory_policies_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", 
self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.BlobInventoryPolicy], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class PrivateEndpointConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`private_endpoint_connections` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Gets the specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :return: PrivateEndpointConnection. 
The PrivateEndpointConnection is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) + + _request = build_private_endpoint_connections_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = 
_deserialize(_models.PrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def put( + self, + resource_group_name: str, + account_name: str, + private_endpoint_connection_name: str, + properties: _models.PrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Update the state of specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. Required. + :type properties: ~azure.mgmt.storage.models.PrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: PrivateEndpointConnection. 
The PrivateEndpointConnection is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def put( + self, + resource_group_name: str, + account_name: str, + private_endpoint_connection_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Update the state of specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: PrivateEndpointConnection. The PrivateEndpointConnection is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def put( + self, + resource_group_name: str, + account_name: str, + private_endpoint_connection_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Update the state of specified private endpoint connection associated with the storage account. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: PrivateEndpointConnection. The PrivateEndpointConnection is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def put( + self, + resource_group_name: str, + account_name: str, + private_endpoint_connection_name: str, + properties: Union[_models.PrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Update the state of specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. Is one of the following types: + PrivateEndpointConnection, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.storage.models.PrivateEndpointConnection or JSON or IO[bytes] + :return: PrivateEndpointConnection. The PrivateEndpointConnection is compatible with + MutableMapping + :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_private_endpoint_connections_put_request( + resource_group_name=resource_group_name, + account_name=account_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.PrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> None: + """Deletes the specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_private_endpoint_connections_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> ItemPaged["_models.PrivateEndpointConnection"]: + """List all the private endpoint connections associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An iterator like instance of PrivateEndpointConnection + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.PrivateEndpointConnection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_private_endpoint_connections_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + 
"self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.PrivateEndpointConnection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class EncryptionScopesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`encryption_scopes` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, account_name: str, encryption_scope_name: str, **kwargs: Any + ) -> _models.EncryptionScope: + """Returns the properties for the specified encryption scope. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.EncryptionScope] = kwargs.pop("cls", None) + + _request = build_encryption_scopes_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + encryption_scope_name=encryption_scope_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.EncryptionScope, response.json()) + + if cls: + return cls(pipeline_response, 
deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def put( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: _models.EncryptionScope, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EncryptionScope: + """Synchronously creates or updates an encryption scope under the specified storage account. If an + encryption scope is already created and a subsequent request is issued with different + properties, the encryption scope properties will be updated per the specified request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the create or update. + Required. + :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def put( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EncryptionScope: + """Synchronously creates or updates an encryption scope under the specified storage account. If an + encryption scope is already created and a subsequent request is issued with different + properties, the encryption scope properties will be updated per the specified request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the create or update. + Required. + :type encryption_scope: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def put( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EncryptionScope: + """Synchronously creates or updates an encryption scope under the specified storage account. If an + encryption scope is already created and a subsequent request is issued with different + properties, the encryption scope properties will be updated per the specified request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the create or update. + Required. + :type encryption_scope: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def put( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: Union[_models.EncryptionScope, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.EncryptionScope: + """Synchronously creates or updates an encryption scope under the specified storage account. If an + encryption scope is already created and a subsequent request is issued with different + properties, the encryption scope properties will be updated per the specified request. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the create or update. Is + one of the following types: EncryptionScope, JSON, IO[bytes] Required. + :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope or JSON or IO[bytes] + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EncryptionScope] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(encryption_scope, (IOBase, bytes)): + _content = encryption_scope + else: + _content = json.dumps(encryption_scope, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_encryption_scopes_put_request( + resource_group_name=resource_group_name, + account_name=account_name, + encryption_scope_name=encryption_scope_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.EncryptionScope, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def patch( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: _models.EncryptionScope, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EncryptionScope: + """Update encryption scope properties as specified in the request body. Update fails if the + specified encryption scope does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the update. Required. + :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: EncryptionScope. The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def patch( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EncryptionScope: + """Update encryption scope properties as specified in the request body. Update fails if the + specified encryption scope does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the update. Required. + :type encryption_scope: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def patch( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EncryptionScope: + """Update encryption scope properties as specified in the request body. Update fails if the + specified encryption scope does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the update. Required. + :type encryption_scope: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def patch( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: Union[_models.EncryptionScope, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.EncryptionScope: + """Update encryption scope properties as specified in the request body. Update fails if the + specified encryption scope does not already exist. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. Required. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the update. Is one of the + following types: EncryptionScope, JSON, IO[bytes] Required. + :type encryption_scope: ~azure.mgmt.storage.models.EncryptionScope or JSON or IO[bytes] + :return: EncryptionScope. 
The EncryptionScope is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.EncryptionScope + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EncryptionScope] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(encryption_scope, (IOBase, bytes)): + _content = encryption_scope + else: + _content = json.dumps(encryption_scope, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_encryption_scopes_patch_request( + resource_group_name=resource_group_name, + account_name=account_name, + encryption_scope_name=encryption_scope_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.EncryptionScope, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + account_name: str, + *, + filter: Optional[str] = None, + include: Optional[Union[str, _models.ListEncryptionScopesInclude]] = None, + **kwargs: Any + ) -> ItemPaged["_models.EncryptionScope"]: + """Lists all the encryption scopes available under the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword filter: Optional. When specified, only encryption scope names starting with the filter + will be listed. Default value is None. + :paramtype filter: str + :keyword include: Optional, when specified, will list encryption scopes with the specific + state. Defaults to All. Known values are: "All", "Enabled", and "Disabled". Default value is + None. 
+ :paramtype include: str or ~azure.mgmt.storage.models.ListEncryptionScopesInclude + :return: An iterator like instance of EncryptionScope + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.EncryptionScope] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.EncryptionScope]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_encryption_scopes_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + filter=filter, + include=include, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.EncryptionScope], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class TableServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`table_services` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get_service_properties( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> _models.TableServiceProperties: + """Gets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: TableServiceProperties. 
The TableServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.TableServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.TableServiceProperties] = kwargs.pop("cls", None) + + _request = build_table_services_get_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.TableServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, 
{}) # type: ignore + + return deserialized # type: ignore + + @overload + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: _models.TableServiceProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TableServiceProperties: + """Sets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Table service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. + :type parameters: ~azure.mgmt.storage.models.TableServiceProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: TableServiceProperties. The TableServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.TableServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TableServiceProperties: + """Sets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Table service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: TableServiceProperties. The TableServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.TableServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TableServiceProperties: + """Sets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Table service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: TableServiceProperties. The TableServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.TableServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: Union[_models.TableServiceProperties, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.TableServiceProperties: + """Sets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param parameters: The properties of a storage account’s Table service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Is one of + the following types: TableServiceProperties, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.TableServiceProperties or JSON or IO[bytes] + :return: TableServiceProperties. 
The TableServiceProperties is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.TableServiceProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.TableServiceProperties] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_table_services_set_service_properties_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.TableServiceProperties, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.ListTableServices: + """List all table services for the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: ListTableServices. 
The ListTableServices is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ListTableServices + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ListTableServices] = kwargs.pop("cls", None) + + _request = build_table_services_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ListTableServices, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return 
deserialized # type: ignore + + +class NetworkSecurityPerimeterConfigurationsOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`network_security_perimeter_configurations` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, + resource_group_name: str, + account_name: str, + network_security_perimeter_configuration_name: str, + **kwargs: Any + ) -> _models.NetworkSecurityPerimeterConfiguration: + """Gets effective NetworkSecurityPerimeterConfiguration for association. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param network_security_perimeter_configuration_name: The name for Network Security Perimeter + configuration. Required. + :type network_security_perimeter_configuration_name: str + :return: NetworkSecurityPerimeterConfiguration. 
The NetworkSecurityPerimeterConfiguration is + compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.NetworkSecurityPerimeterConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.NetworkSecurityPerimeterConfiguration] = kwargs.pop("cls", None) + + _request = build_network_security_perimeter_configurations_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else 
response.iter_raw() + else: + deserialized = _deserialize(_models.NetworkSecurityPerimeterConfiguration, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> ItemPaged["_models.NetworkSecurityPerimeterConfiguration"]: + """Gets list of effective NetworkSecurityPerimeterConfiguration for storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An iterator like instance of NetworkSecurityPerimeterConfiguration + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.NetworkSecurityPerimeterConfiguration] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.NetworkSecurityPerimeterConfiguration]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_network_security_perimeter_configurations_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + 
), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.NetworkSecurityPerimeterConfiguration], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + def _reconcile_initial( + self, + resource_group_name: str, + account_name: str, + network_security_perimeter_configuration_name: str, + **kwargs: Any + ) -> Iterator[bytes]: + 
error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_network_security_perimeter_configurations_reconcile_request( + resource_group_name=resource_group_name, + account_name=account_name, + network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, 
response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_reconcile( + self, + resource_group_name: str, + account_name: str, + network_security_perimeter_configuration_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Refreshes any information about the association. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param network_security_perimeter_configuration_name: The name for Network Security Perimeter + configuration. Required. + :type network_security_perimeter_configuration_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._reconcile_initial( + resource_group_name=resource_group_name, + account_name=account_name, + network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": 
self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + +class StorageTaskAssignmentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`storage_task_assignments` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any + ) -> _models.StorageTaskAssignment: + """Get the storage task assignment properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :return: StorageTaskAssignment. The StorageTaskAssignment is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.StorageTaskAssignment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageTaskAssignment] = kwargs.pop("cls", None) + + _request = build_storage_task_assignments_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except 
(StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageTaskAssignment, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_initial( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: Union[_models.StorageTaskAssignment, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_task_assignments_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": 
self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: _models.StorageTaskAssignment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageTaskAssignment]: + """Asynchronously creates a new storage task assignment sub-resource with the specified + parameters. 
If a storage task assignment is already created and a subsequent create request is + issued with different properties, the storage task assignment properties will be updated. If a + storage task assignment is already created and a subsequent create or update request is issued + with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to create a Storage Task Assignment. Required. + :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageTaskAssignment. The StorageTaskAssignment + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageTaskAssignment]: + """Asynchronously creates a new storage task assignment sub-resource with the specified + parameters. 
If a storage task assignment is already created and a subsequent create request is + issued with different properties, the storage task assignment properties will be updated. If a + storage task assignment is already created and a subsequent create or update request is issued + with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to create a Storage Task Assignment. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageTaskAssignment. The StorageTaskAssignment + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageTaskAssignment]: + """Asynchronously creates a new storage task assignment sub-resource with the specified + parameters. 
If a storage task assignment is already created and a subsequent create request is + issued with different properties, the storage task assignment properties will be updated. If a + storage task assignment is already created and a subsequent create or update request is issued + with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to create a Storage Task Assignment. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageTaskAssignment. The StorageTaskAssignment + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: Union[_models.StorageTaskAssignment, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.StorageTaskAssignment]: + """Asynchronously creates a new storage task assignment sub-resource with the specified + parameters. 
If a storage task assignment is already created and a subsequent create request is + issued with different properties, the storage task assignment properties will be updated. If a + storage task assignment is already created and a subsequent create or update request is issued + with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to create a Storage Task Assignment. Is one of the following + types: StorageTaskAssignment, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignment or JSON or IO[bytes] + :return: An instance of LROPoller that returns StorageTaskAssignment. 
The StorageTaskAssignment + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageTaskAssignment] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_initial( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + parameters=parameters, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageTaskAssignment, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.StorageTaskAssignment].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.StorageTaskAssignment]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: Union[_models.StorageTaskAssignmentUpdateParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_task_assignments_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = 
pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: _models.StorageTaskAssignmentUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageTaskAssignment]: + """Update storage task assignment properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. 
Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to update a Storage Task Assignment. Required. + :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageTaskAssignment. The StorageTaskAssignment + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageTaskAssignment]: + """Update storage task assignment properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to update a Storage Task Assignment. Required. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
+ Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageTaskAssignment. The StorageTaskAssignment + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageTaskAssignment]: + """Update storage task assignment properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to update a Storage Task Assignment. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageTaskAssignment. 
The StorageTaskAssignment + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + parameters: Union[_models.StorageTaskAssignmentUpdateParameters, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.StorageTaskAssignment]: + """Update storage task assignment properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :param parameters: The parameters to update a Storage Task Assignment. Is one of the following + types: StorageTaskAssignmentUpdateParameters, JSON, IO[bytes] Required. + :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateParameters or JSON or + IO[bytes] + :return: An instance of LROPoller that returns StorageTaskAssignment. 
The StorageTaskAssignment + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageTaskAssignment] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + parameters=parameters, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageTaskAssignment, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.StorageTaskAssignment].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.StorageTaskAssignment]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_task_assignments_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + 
response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete the storage task assignment sub-resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. 
+ :type storage_task_assignment_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, account_name: str, *, top: Optional[int] = None, **kwargs: Any + ) -> ItemPaged["_models.StorageTaskAssignment"]: + """List 
all the storage task assignments in an account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword top: Optional, specifies the maximum number of storage task assignment Ids to be + included in the list response. Default value is None. + :paramtype top: int + :return: An iterator like instance of StorageTaskAssignment + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.StorageTaskAssignment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageTaskAssignment]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_task_assignments_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + top=top, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageTaskAssignment], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "storage_task_assignment_name", + ] + }, + api_versions_list=["2025-08-01"], + ) + def _stop_assignment_initial( + self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: 
ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_task_assignments_stop_assignment_request( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return 
cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "storage_task_assignment_name", + ] + }, + api_versions_list=["2025-08-01"], + ) + def begin_stop_assignment( + self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Stops any active running storage action for the storage task assignment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. 
+ :type storage_task_assignment_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._stop_assignment_initial( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + +class ConnectorsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`connectors` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + def get(self, resource_group_name: str, account_name: str, connector_name: str, **kwargs: Any) -> _models.Connector: + """Get the specified Storage Connector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :return: Connector. 
The Connector is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Connector + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Connector] = kwargs.pop("cls", None) + + _request = build_connectors_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Connector, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + 
return deserialized # type: ignore + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + def _create_initial( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + resource: Union[_models.Connector, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_connectors_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + 
response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + resource: _models.Connector, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Connector]: + """Create a Storage Connector if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param resource: Create a Storage Connector if it does not already exist; otherwise, error out. 
+ This API will not allow you to replace an already existing resource. Required. + :type resource: ~azure.mgmt.storage.models.Connector + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Connector. The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Connector]: + """Create a Storage Connector if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param resource: Create a Storage Connector if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Connector. 
The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Connector]: + """Create a Storage Connector if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param resource: Create a Storage Connector if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Connector. 
The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + def begin_create( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + resource: Union[_models.Connector, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Connector]: + """Create a Storage Connector if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param resource: Create a Storage Connector if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Is one of the following + types: Connector, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.storage.models.Connector or JSON or IO[bytes] + :return: An instance of LROPoller that returns Connector. 
The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Connector] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_initial( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Connector, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Connector].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Connector]( + self._client, 
raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + def _update_initial( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + properties: Union[_models.Connector, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_connectors_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + 
_request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + properties: _models.Connector, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Connector]: + """Update a Storage Connector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param properties: The updated properties of the Storage Connector. Required. + :type properties: ~azure.mgmt.storage.models.Connector + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
+ Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Connector. The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Connector]: + """Update a Storage Connector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param properties: The updated properties of the Storage Connector. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Connector. The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Connector]: + """Update a Storage Connector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param properties: The updated properties of the Storage Connector. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Connector. The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + def begin_update( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + properties: Union[_models.Connector, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Connector]: + """Update a Storage Connector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param properties: The updated properties of the Storage Connector. 
Is one of the following + types: Connector, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.storage.models.Connector or JSON or IO[bytes] + :return: An instance of LROPoller that returns Connector. The Connector is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Connector] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Connector, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return 
LROPoller[_models.Connector].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Connector]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": ["api_version", "subscription_id", "resource_group_name", "account_name", "connector_name"] + }, + api_versions_list=["2025-08-01"], + ) + def _delete_initial( + self, resource_group_name: str, account_name: str, connector_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_connectors_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": ["api_version", "subscription_id", "resource_group_name", "account_name", "connector_name"] + }, + api_versions_list=["2025-08-01"], + ) + def begin_delete( + self, resource_group_name: str, account_name: str, connector_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a Storage Connector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. 
+ :type connector_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": ["api_version", "subscription_id", "resource_group_name", "account_name", "accept"] + }, + 
api_versions_list=["2025-08-01"], + ) + def list_by_storage_account( + self, resource_group_name: str, account_name: str, **kwargs: Any + ) -> ItemPaged["_models.Connector"]: + """List all Storage Connectors in a Storage Account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An iterator like instance of Connector + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.Connector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Connector]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_connectors_list_by_storage_account_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Connector], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + def _test_existing_connection_initial( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + body: Union[_models.TestExistingConnectionRequest, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: 
MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_connectors_test_existing_connection_request( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if 
response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_test_existing_connection( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + body: _models.TestExistingConnectionRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.TestConnectionResponse]: + """This method is used to verify that the connection to the backing data store works. This API is + designed to be used for monitoring and debugging purposes. From the caller’s perspective, this + method does the following: Calls List on the backing data store, attempting to list up to one + blob/object/etc. If the above succeeds, and if a blob/object/etc is found, calls Get on that + object, attempting to download one byte. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param body: This method is used to verify that the connection to the backing data store works. + This API is designed to be used for monitoring and debugging purposes. From the caller’s + perspective, this method does the following: Calls List on the backing data store, attempting + to list up to one blob/object/etc. 
If the above succeeds, and if a blob/object/etc is found, + calls Get on that object, attempting to download one byte. Required. + :type body: ~azure.mgmt.storage.models.TestExistingConnectionRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns TestConnectionResponse. The + TestConnectionResponse is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.TestConnectionResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_test_existing_connection( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + body: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.TestConnectionResponse]: + """This method is used to verify that the connection to the backing data store works. This API is + designed to be used for monitoring and debugging purposes. From the caller’s perspective, this + method does the following: Calls List on the backing data store, attempting to list up to one + blob/object/etc. If the above succeeds, and if a blob/object/etc is found, calls Get on that + object, attempting to download one byte. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param body: This method is used to verify that the connection to the backing data store works. + This API is designed to be used for monitoring and debugging purposes. 
From the caller’s + perspective, this method does the following: Calls List on the backing data store, attempting + to list up to one blob/object/etc. If the above succeeds, and if a blob/object/etc is found, + calls Get on that object, attempting to download one byte. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns TestConnectionResponse. The + TestConnectionResponse is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.TestConnectionResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_test_existing_connection( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + body: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.TestConnectionResponse]: + """This method is used to verify that the connection to the backing data store works. This API is + designed to be used for monitoring and debugging purposes. From the caller’s perspective, this + method does the following: Calls List on the backing data store, attempting to list up to one + blob/object/etc. If the above succeeds, and if a blob/object/etc is found, calls Get on that + object, attempting to download one byte. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. 
+ :type connector_name: str + :param body: This method is used to verify that the connection to the backing data store works. + This API is designed to be used for monitoring and debugging purposes. From the caller’s + perspective, this method does the following: Calls List on the backing data store, attempting + to list up to one blob/object/etc. If the above succeeds, and if a blob/object/etc is found, + calls Get on that object, attempting to download one byte. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns TestConnectionResponse. The + TestConnectionResponse is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.TestConnectionResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "connector_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + def begin_test_existing_connection( + self, + resource_group_name: str, + account_name: str, + connector_name: str, + body: Union[_models.TestExistingConnectionRequest, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.TestConnectionResponse]: + """This method is used to verify that the connection to the backing data store works. This API is + designed to be used for monitoring and debugging purposes. From the caller’s perspective, this + method does the following: Calls List on the backing data store, attempting to list up to one + blob/object/etc. If the above succeeds, and if a blob/object/etc is found, calls Get on that + object, attempting to download one byte. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param connector_name: The name of the Storage Connector. Required. + :type connector_name: str + :param body: This method is used to verify that the connection to the backing data store works. + This API is designed to be used for monitoring and debugging purposes. From the caller’s + perspective, this method does the following: Calls List on the backing data store, attempting + to list up to one blob/object/etc. If the above succeeds, and if a blob/object/etc is found, + calls Get on that object, attempting to download one byte. Is one of the following types: + TestExistingConnectionRequest, JSON, IO[bytes] Required. + :type body: ~azure.mgmt.storage.models.TestExistingConnectionRequest or JSON or IO[bytes] + :return: An instance of LROPoller that returns TestConnectionResponse. 
The + TestConnectionResponse is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.TestConnectionResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.TestConnectionResponse] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._test_existing_connection_initial( + resource_group_name=resource_group_name, + account_name=account_name, + connector_name=connector_name, + body=body, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = _deserialize(_models.TestConnectionResponse, response.json()) + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method 
= polling + if cont_token: + return LROPoller[_models.TestConnectionResponse].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.TestConnectionResponse]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + +class DataSharesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`data_shares` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "data_share_name", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + def get( + self, resource_group_name: str, account_name: str, data_share_name: str, **kwargs: Any + ) -> _models.DataShare: + """Get the specified Storage DataShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. 
+ :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :return: DataShare. The DataShare is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.DataShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.DataShare] = kwargs.pop("cls", None) + + _request = build_data_shares_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + data_share_name=data_share_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: 
+ deserialized = _deserialize(_models.DataShare, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "data_share_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + def _create_initial( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + resource: Union[_models.DataShare, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_data_shares_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + data_share_name=data_share_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = 
True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + resource: _models.DataShare, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DataShare]: + """Create a Storage DataShare if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. 
+ :type data_share_name: str + :param resource: Create a Storage DataShare if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Required. + :type resource: ~azure.mgmt.storage.models.DataShare + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns DataShare. The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DataShare]: + """Create a Storage DataShare if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param resource: Create a Storage DataShare if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns DataShare. 
The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DataShare]: + """Create a Storage DataShare if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param resource: Create a Storage DataShare if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns DataShare. 
The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "data_share_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + def begin_create( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + resource: Union[_models.DataShare, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.DataShare]: + """Create a Storage DataShare if it does not already exist; otherwise, error out. This API will + not allow you to replace an already existing resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param resource: Create a Storage DataShare if it does not already exist; otherwise, error out. + This API will not allow you to replace an already existing resource. Is one of the following + types: DataShare, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.storage.models.DataShare or JSON or IO[bytes] + :return: An instance of LROPoller that returns DataShare. 
The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataShare] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_initial( + resource_group_name=resource_group_name, + account_name=account_name, + data_share_name=data_share_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.DataShare, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.DataShare].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.DataShare]( + self._client, 
raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "account_name", + "data_share_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01"], + ) + def _update_initial( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + properties: Union[_models.DataShare, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_data_shares_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + data_share_name=data_share_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + properties: _models.DataShare, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DataShare]: + """Update a Storage DataShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param properties: The updated properties of the Storage DataShare. Required. + :type properties: ~azure.mgmt.storage.models.DataShare + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns DataShare. The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DataShare]: + """Update a Storage DataShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param data_share_name: The name of the Storage DataShare. Required. + :type data_share_name: str + :param properties: The updated properties of the Storage DataShare. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns DataShare. The DataShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.DataShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + account_name: str, + data_share_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DataShare]: + """Update a Storage DataShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
    @distributed_trace
    @api_version_validation(
        method_added_on="2025-08-01",
        params_added_on={
            "2025-08-01": [
                "api_version",
                "subscription_id",
                "resource_group_name",
                "account_name",
                "data_share_name",
                "content_type",
                "accept",
            ]
        },
        api_versions_list=["2025-08-01"],
    )
    def begin_update(
        self,
        resource_group_name: str,
        account_name: str,
        data_share_name: str,
        properties: Union[_models.DataShare, JSON, IO[bytes]],
        **kwargs: Any
    ) -> LROPoller[_models.DataShare]:
        """Update a Storage DataShare.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param data_share_name: The name of the Storage DataShare. Required.
        :type data_share_name: str
        :param properties: The updated properties of the Storage DataShare. Is one of the following
         types: DataShare, JSON, IO[bytes] Required.
        :type properties: ~azure.mgmt.storage.models.DataShare or JSON or IO[bytes]
        :return: An instance of LROPoller that returns DataShare. The DataShare is compatible with
         MutableMapping
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.DataShare]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.DataShare] = kwargs.pop("cls", None)
        # LRO-control kwargs are popped here so they are not forwarded to the pipeline.
        polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            raw_result = self._update_initial(
                resource_group_name=resource_group_name,
                account_name=account_name,
                data_share_name=data_share_name,
                properties=properties,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
            # Load the initial (streamed) response body before polling begins.
            raw_result.http_response.read()  # type: ignore
        # error_map is consumed by the initial call; remove it so it is not
        # forwarded to the polling method via **kwargs below.
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            response = pipeline_response.http_response
            deserialized = _deserialize(_models.DataShare, response.json())
            if cls:
                return cls(pipeline_response, deserialized, {})  # type: ignore
            return deserialized

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: PollingMethod = cast(
                PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller[_models.DataShare].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller[_models.DataShare](
            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
        )
    @api_version_validation(
        method_added_on="2025-08-01",
        params_added_on={
            "2025-08-01": ["api_version", "subscription_id", "resource_group_name", "account_name", "data_share_name"]
        },
        api_versions_list=["2025-08-01"],
    )
    def _delete_initial(
        self, resource_group_name: str, account_name: str, data_share_name: str, **kwargs: Any
    ) -> Iterator[bytes]:
        # Initial call of the delete LRO; returns the raw streamed response body
        # which the poller consumes.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)

        _request = build_data_shares_delete_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            data_share_name=data_share_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = True
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [202, 204]:
            try:
                response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponseAutoGenerated,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            # 202 carries the polling location headers for the LRO.
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace
    @api_version_validation(
        method_added_on="2025-08-01",
        params_added_on={
            "2025-08-01": ["api_version", "subscription_id", "resource_group_name", "account_name", "data_share_name"]
        },
        api_versions_list=["2025-08-01"],
    )
    def begin_delete(
        self, resource_group_name: str, account_name: str, data_share_name: str, **kwargs: Any
    ) -> LROPoller[None]:
        """Delete a Storage DataShare.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param data_share_name: The name of the Storage DataShare. Required.
        :type data_share_name: str
        :return: An instance of LROPoller that returns None
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)
        polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                account_name=account_name,
                data_share_name=data_share_name,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
            # Load the initial (streamed) response body before polling begins.
            raw_result.http_response.read()  # type: ignore
        # Do not forward error_map to the polling method via **kwargs below.
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {})  # type: ignore

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: PollingMethod = cast(
                PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller[None].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method)  # type: ignore
    @distributed_trace
    @api_version_validation(
        method_added_on="2025-08-01",
        params_added_on={
            "2025-08-01": ["api_version", "subscription_id", "resource_group_name", "account_name", "accept"]
        },
        api_versions_list=["2025-08-01"],
    )
    def list_by_storage_account(
        self, resource_group_name: str, account_name: str, **kwargs: Any
    ) -> ItemPaged["_models.DataShare"]:
        """List all Storage DataShares in a Storage Account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :return: An iterator like instance of DataShare
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.DataShare]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_models.DataShare]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the request builder; subsequent pages re-issue the
            # service-provided nextLink with the client's api-version re-applied.
            if not next_link:

                _request = build_data_shares_list_by_storage_account_request(
                    resource_group_name=resource_group_name,
                    account_name=account_name,
                    subscription_id=self._config.subscription_id,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        def extract_data(pipeline_response):
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(
                List[_models.DataShare],
                deserialized.get("value", []),
            )
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("nextLink") or None, iter(list_of_elem)

        def get_next(next_link=None):
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(
                    _models.ErrorResponseAutoGenerated,
                    response,
                )
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(get_next, extract_data)
class PrivateLinkResourcesOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.mgmt.storage.StorageManagementClient`'s
    :attr:`private_link_resources` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # Accepts (client, config, serializer, deserializer) positionally or as keywords.
        input_args = list(args)
        self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list_by_storage_account(
        self, resource_group_name: str, account_name: str, **kwargs: Any
    ) -> _models.PrivateLinkResourceListResult:
        """Gets the private link resources that need to be created for a storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :return: PrivateLinkResourceListResult. The PrivateLinkResourceListResult is compatible with
         MutableMapping
        :rtype: ~azure.mgmt.storage.models.PrivateLinkResourceListResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None)

        _request = build_private_link_resources_list_by_storage_account_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.PrivateLinkResourceListResult, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore
class StorageTaskAssignmentsInstancesReportOperations:  # pylint: disable=name-too-long
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.mgmt.storage.StorageManagementClient`'s
    :attr:`storage_task_assignments_instances_report` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # Accepts (client, config, serializer, deserializer) positionally or as keywords.
        input_args = list(args)
        self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list(
        self, resource_group_name: str, account_name: str, *, filter: Optional[str] = None, **kwargs: Any
    ) -> ItemPaged["_models.StorageTaskReportInstance"]:
        """Fetch the report summary of all the storage task assignments and instances in an account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :keyword filter: Optional. When specified, it can be used to query using reporting properties.
         See `Constructing Filter Strings
         <https://learn.microsoft.com/rest/api/storageservices/querying-tables-and-entities#constructing-filter-strings>`_
         for details. Default value is None.
        :paramtype filter: str
        :return: An iterator like instance of StorageTaskReportInstance
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.StorageTaskReportInstance]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        maxpagesize = kwargs.pop("maxpagesize", None)
        cls: ClsType[List[_models.StorageTaskReportInstance]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the request builder; subsequent pages re-issue the
            # service-provided nextLink with the client's api-version re-applied.
            if not next_link:

                _request = build_storage_task_assignments_instances_report_list_request(
                    resource_group_name=resource_group_name,
                    account_name=account_name,
                    subscription_id=self._config.subscription_id,
                    maxpagesize=maxpagesize,
                    filter=filter,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        def extract_data(pipeline_response):
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(
                List[_models.StorageTaskReportInstance],
                deserialized.get("value", []),
            )
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("nextLink") or None, iter(list_of_elem)

        def get_next(next_link=None):
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(
                    _models.ErrorResponse,
                    response,
                )
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(get_next, extract_data)
class QueueOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.mgmt.storage.StorageManagementClient`'s
    :attr:`queue` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # Accepts (client, config, serializer, deserializer) positionally or as keywords.
        input_args = list(args)
        self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list(
        self, resource_group_name: str, account_name: str, *, filter: Optional[str] = None, **kwargs: Any
    ) -> ItemPaged["_models.ListQueue"]:
        """Gets a list of all the queues under the specified storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :keyword filter: Optional, When specified, only the queues with a name starting with the given
         filter will be listed. Default value is None.
        :paramtype filter: str
        :return: An iterator like instance of ListQueue
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.ListQueue]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        maxpagesize = kwargs.pop("maxpagesize", None)
        cls: ClsType[List[_models.ListQueue]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the request builder; subsequent pages re-issue the
            # service-provided nextLink with the client's api-version re-applied.
            if not next_link:

                _request = build_queue_list_request(
                    resource_group_name=resource_group_name,
                    account_name=account_name,
                    subscription_id=self._config.subscription_id,
                    maxpagesize=maxpagesize,
                    filter=filter,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        def extract_data(pipeline_response):
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(
                List[_models.ListQueue],
                deserialized.get("value", []),
            )
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("nextLink") or None, iter(list_of_elem)

        def get_next(next_link=None):
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(
                    _models.CloudError,
                    response,
                )
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(get_next, extract_data)
    @distributed_trace
    def get(self, resource_group_name: str, account_name: str, queue_name: str, **kwargs: Any) -> _models.StorageQueue:
        """Gets the queue with the specified queue name, under the specified account if it exists.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param queue_name: A queue name must be unique within a storage account and must be between 3
         and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only,
         it should begin and end with an alphanumeric character and it cannot have two consecutive
         dash(-) characters. Required.
        :type queue_name: str
        :return: StorageQueue. The StorageQueue is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.StorageQueue
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.StorageQueue] = kwargs.pop("cls", None)

        _request = build_queue_get_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            queue_name=queue_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.CloudError,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.StorageQueue, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore
    @overload
    def create(
        self,
        resource_group_name: str,
        account_name: str,
        queue_name: str,
        queue: _models.StorageQueue,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.StorageQueue:
        """Creates a new queue with the specified queue name, under the specified account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param queue_name: A queue name must be unique within a storage account and must be between 3
         and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only,
         it should begin and end with an alphanumeric character and it cannot have two consecutive
         dash(-) characters. Required.
        :type queue_name: str
        :param queue: Queue properties and metadata to be created with. Required.
        :type queue: ~azure.mgmt.storage.models.StorageQueue
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: StorageQueue. The StorageQueue is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.StorageQueue
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def create(
        self,
        resource_group_name: str,
        account_name: str,
        queue_name: str,
        queue: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.StorageQueue:
        """Creates a new queue with the specified queue name, under the specified account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param queue_name: A queue name must be unique within a storage account and must be between 3
         and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only,
         it should begin and end with an alphanumeric character and it cannot have two consecutive
         dash(-) characters. Required.
        :type queue_name: str
        :param queue: Queue properties and metadata to be created with. Required.
        :type queue: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: StorageQueue. The StorageQueue is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.StorageQueue
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def create(
        self,
        resource_group_name: str,
        account_name: str,
        queue_name: str,
        queue: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.StorageQueue:
        """Creates a new queue with the specified queue name, under the specified account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param queue_name: A queue name must be unique within a storage account and must be between 3
         and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only,
         it should begin and end with an alphanumeric character and it cannot have two consecutive
         dash(-) characters. Required.
        :type queue_name: str
        :param queue: Queue properties and metadata to be created with. Required.
        :type queue: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: StorageQueue. The StorageQueue is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.StorageQueue
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace
    def create(
        self,
        resource_group_name: str,
        account_name: str,
        queue_name: str,
        queue: Union[_models.StorageQueue, JSON, IO[bytes]],
        **kwargs: Any
    ) -> _models.StorageQueue:
        """Creates a new queue with the specified queue name, under the specified account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param queue_name: A queue name must be unique within a storage account and must be between 3
         and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only,
         it should begin and end with an alphanumeric character and it cannot have two consecutive
         dash(-) characters. Required.
        :type queue_name: str
        :param queue: Queue properties and metadata to be created with. Is one of the following types:
         StorageQueue, JSON, IO[bytes] Required.
        :type queue: ~azure.mgmt.storage.models.StorageQueue or JSON or IO[bytes]
        :return: StorageQueue. The StorageQueue is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.StorageQueue
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.StorageQueue] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        # File-like/bytes bodies are sent as-is; models and dicts are JSON-serialized.
        if isinstance(queue, (IOBase, bytes)):
            _content = queue
        else:
            _content = json.dumps(queue, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_queue_create_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            queue_name=queue_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.CloudError,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.StorageQueue, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore
**path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageQueue, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def update( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + queue: _models.StorageQueue, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.StorageQueue: + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. 
        :type account_name: str
        :param queue_name: A queue name must be unique within a storage account and must be between 3
         and 63 characters. The name must comprise of lowercase alphanumeric and dash(-) characters only,
         it should begin and end with an alphanumeric character and it cannot have two consecutive
         dash(-) characters. Required.
        :type queue_name: str
        :param queue: Queue properties and metadata to be created with. Required.
        :type queue: ~azure.mgmt.storage.models.StorageQueue
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: StorageQueue. The StorageQueue is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.StorageQueue
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def update(
        self,
        resource_group_name: str,
        account_name: str,
        queue_name: str,
        queue: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.StorageQueue:
        """Creates a new queue with the specified queue name, under the specified account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param queue_name: A queue name must be unique within a storage account and must be between 3
         and 63 characters. The name must comprise of lowercase alphanumeric and dash(-) characters only,
         it should begin and end with an alphanumeric character and it cannot have two consecutive
         dash(-) characters. Required.
        :type queue_name: str
        :param queue: Queue properties and metadata to be created with. Required.
        :type queue: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: StorageQueue. The StorageQueue is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.StorageQueue
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def update(
        self,
        resource_group_name: str,
        account_name: str,
        queue_name: str,
        queue: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.StorageQueue:
        """Creates a new queue with the specified queue name, under the specified account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param queue_name: A queue name must be unique within a storage account and must be between 3
         and 63 characters. The name must comprise of lowercase alphanumeric and dash(-) characters only,
         it should begin and end with an alphanumeric character and it cannot have two consecutive
         dash(-) characters. Required.
        :type queue_name: str
        :param queue: Queue properties and metadata to be created with. Required.
        :type queue: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: StorageQueue.
         The StorageQueue is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.StorageQueue
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace
    def update(
        self,
        resource_group_name: str,
        account_name: str,
        queue_name: str,
        queue: Union[_models.StorageQueue, JSON, IO[bytes]],
        **kwargs: Any
    ) -> _models.StorageQueue:
        """Creates a new queue with the specified queue name, under the specified account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param queue_name: A queue name must be unique within a storage account and must be between 3
         and 63 characters. The name must comprise of lowercase alphanumeric and dash(-) characters only,
         it should begin and end with an alphanumeric character and it cannot have two consecutive
         dash(-) characters. Required.
        :type queue_name: str
        :param queue: Queue properties and metadata to be created with. Is one of the following types:
         StorageQueue, JSON, IO[bytes] Required.
        :type queue: ~azure.mgmt.storage.models.StorageQueue or JSON or IO[bytes]
        :return: StorageQueue.
         The StorageQueue is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.StorageQueue
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status-code -> azure-core exception mapping; callers may extend or
        # override it via the ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.StorageQueue] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        # Stream/bytes bodies are sent as-is; models and dicts are JSON-encoded
        # with read-only properties excluded.
        if isinstance(queue, (IOBase, bytes)):
            _content = queue
        else:
            _content = json.dumps(queue, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_queue_update_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            queue_name=queue_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # Splice the configured endpoint into the request URL (skip_quote keeps it verbatim).
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
+ _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageQueue, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, account_name: str, queue_name: str, **kwargs: Any + ) -> None: + """Deletes the queue with the specified queue name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. Required. 
+ :type queue_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_queue_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + queue_name=queue_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + +class ObjectReplicationPoliciesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`object_replication_policies` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, account_name: str, object_replication_policy_id: str, **kwargs: Any + ) -> _models.ObjectReplicationPolicy: + """Get the object replication policy of the storage account by policy ID. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. Required. + :type object_replication_policy_id: str + :return: ObjectReplicationPolicy. 
The ObjectReplicationPolicy is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ObjectReplicationPolicy] = kwargs.pop("cls", None) + + _request = build_object_replication_policies_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + object_replication_policy_id=object_replication_policy_id, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ObjectReplicationPolicy, 
response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + account_name: str, + object_replication_policy_id: str, + properties: _models.ObjectReplicationPolicy, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ObjectReplicationPolicy: + """Create or update the object replication policy of the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. Required. + :type object_replication_policy_id: str + :param properties: The object replication policy set to a storage account. A unique policy ID + will be created if absent. Required. + :type properties: ~azure.mgmt.storage.models.ObjectReplicationPolicy + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: ObjectReplicationPolicy. 
         The ObjectReplicationPolicy is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def create_or_update(
        self,
        resource_group_name: str,
        account_name: str,
        object_replication_policy_id: str,
        properties: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.ObjectReplicationPolicy:
        """Create or update the object replication policy of the storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param object_replication_policy_id: For the destination account, provide the value 'default'.
         Configure the policy on the destination account first. For the source account, provide the
         value of the policy ID that is returned when you download the policy that was defined on the
         destination account. The policy is downloaded as a JSON file. Required.
        :type object_replication_policy_id: str
        :param properties: The object replication policy set to a storage account. A unique policy ID
         will be created if absent. Required.
        :type properties: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: ObjectReplicationPolicy. The ObjectReplicationPolicy is compatible with
         MutableMapping
        :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def create_or_update(
        self,
        resource_group_name: str,
        account_name: str,
        object_replication_policy_id: str,
        properties: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.ObjectReplicationPolicy:
        """Create or update the object replication policy of the storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param object_replication_policy_id: For the destination account, provide the value 'default'.
         Configure the policy on the destination account first. For the source account, provide the
         value of the policy ID that is returned when you download the policy that was defined on the
         destination account. The policy is downloaded as a JSON file. Required.
        :type object_replication_policy_id: str
        :param properties: The object replication policy set to a storage account. A unique policy ID
         will be created if absent. Required.
        :type properties: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: ObjectReplicationPolicy. The ObjectReplicationPolicy is compatible with
         MutableMapping
        :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace
    def create_or_update(
        self,
        resource_group_name: str,
        account_name: str,
        object_replication_policy_id: str,
        properties: Union[_models.ObjectReplicationPolicy, JSON, IO[bytes]],
        **kwargs: Any
    ) -> _models.ObjectReplicationPolicy:
        """Create or update the object replication policy of the storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param object_replication_policy_id: For the destination account, provide the value 'default'.
         Configure the policy on the destination account first. For the source account, provide the
         value of the policy ID that is returned when you download the policy that was defined on the
         destination account. The policy is downloaded as a JSON file. Required.
        :type object_replication_policy_id: str
        :param properties: The object replication policy set to a storage account. A unique policy ID
         will be created if absent. Is one of the following types: ObjectReplicationPolicy, JSON,
         IO[bytes] Required.
        :type properties: ~azure.mgmt.storage.models.ObjectReplicationPolicy or JSON or IO[bytes]
        :return: ObjectReplicationPolicy.
         The ObjectReplicationPolicy is compatible with MutableMapping
        :rtype: ~azure.mgmt.storage.models.ObjectReplicationPolicy
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status-code -> azure-core exception mapping; callers may extend or
        # override it via the ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.ObjectReplicationPolicy] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        # Stream/bytes bodies are sent as-is; models and dicts are JSON-encoded
        # with read-only properties excluded.
        if isinstance(properties, (IOBase, bytes)):
            _content = properties
        else:
            _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_object_replication_policies_create_or_update_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            object_replication_policy_id=object_replication_policy_id,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # Splice the configured endpoint into the request URL (skip_quote keeps it verbatim).
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            # stream=True hands the caller the raw byte iterator instead of a model.
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.ObjectReplicationPolicy, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore

    @distributed_trace
    def delete(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, account_name: str, object_replication_policy_id: str, **kwargs: Any
    ) -> None:
        """Deletes the object replication policy associated with the specified storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param object_replication_policy_id: For the destination account, provide the value 'default'.
         Configure the policy on the destination account first. For the source account, provide the
         value of the policy ID that is returned when you download the policy that was defined on the
         destination account. The policy is downloaded as a JSON file. Required.
        :type object_replication_policy_id: str
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status-code -> azure-core exception mapping; callers may extend or
        # override it via the ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_object_replication_policies_delete_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            object_replication_policy_id=object_replication_policy_id,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        # Splice the configured endpoint into the request URL (skip_quote keeps it verbatim).
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # Both 200 and 204 are accepted as successful deletions.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})  # type: ignore

    @distributed_trace
    def list(
        self, resource_group_name: str, account_name: str, **kwargs: Any
    ) -> ItemPaged["_models.ObjectReplicationPolicy"]:
        """List the object replication policies associated with the storage account.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :return: An iterator like instance of ObjectReplicationPolicy
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.ObjectReplicationPolicy]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_models.ObjectReplicationPolicy]] = kwargs.pop("cls", None)

        # Status-code -> azure-core exception mapping; callers may extend or
        # override it via the ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the generated request builder; subsequent pages
            # reuse the service-provided nextLink, re-applying the client api-version.
            if not next_link:

                _request = build_object_replication_policies_list_request(
                    resource_group_name=resource_group_name,
                    account_name=account_name,
                    subscription_id=self._config.subscription_id,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
"self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.ObjectReplicationPolicy], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class LocalUsersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`local_users` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, account_name: str, username: str, **kwargs: Any) -> _models.LocalUser: + """Get the local user of the storage account by username. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :return: LocalUser. 
The LocalUser is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUser + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.LocalUser] = kwargs.pop("cls", None) + + _request = build_local_users_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + username=username, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LocalUser, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: 
ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + account_name: str, + username: str, + properties: _models.LocalUser, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LocalUser: + """Create or update the properties of a local user associated with the storage account. Properties + for NFSv3 enablement and extended groups cannot be set with other properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :param properties: The local user associated with a storage account. Required. + :type properties: ~azure.mgmt.storage.models.LocalUser + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: LocalUser. The LocalUser is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUser + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + account_name: str, + username: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LocalUser: + """Create or update the properties of a local user associated with the storage account. Properties + for NFSv3 enablement and extended groups cannot be set with other properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :param properties: The local user associated with a storage account. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: LocalUser. The LocalUser is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUser + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + account_name: str, + username: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LocalUser: + """Create or update the properties of a local user associated with the storage account. Properties + for NFSv3 enablement and extended groups cannot be set with other properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :param properties: The local user associated with a storage account. Required. 
+ :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: LocalUser. The LocalUser is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUser + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + account_name: str, + username: str, + properties: Union[_models.LocalUser, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.LocalUser: + """Create or update the properties of a local user associated with the storage account. Properties + for NFSv3 enablement and extended groups cannot be set with other properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :param properties: The local user associated with a storage account. Is one of the following + types: LocalUser, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.storage.models.LocalUser or JSON or IO[bytes] + :return: LocalUser. 
The LocalUser is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUser + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.LocalUser] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_local_users_create_or_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + username=username, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
_failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LocalUser, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, account_name: str, username: str, **kwargs: Any + ) -> None: + """Deletes the local user associated with the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. 
+ :type username: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_local_users_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + username=username, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + account_name: str, + *, + filter: Optional[str] = None, + include: Optional[Union[str, _models.ListLocalUserIncludeParam]] = None, + **kwargs: Any + ) -> ItemPaged["_models.LocalUser"]: + """List the local users associated with the storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :keyword filter: Optional. When specified, only local user names starting with the filter will + be listed. Default value is None. + :paramtype filter: str + :keyword include: Optional, when specified, will list local users enabled for the specific + protocol. Lists all users by default. "nfsv3" Default value is None. + :paramtype include: str or ~azure.mgmt.storage.models.ListLocalUserIncludeParam + :return: An iterator like instance of LocalUser + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.LocalUser] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.LocalUser]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_local_users_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + filter=filter, + include=include, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = 
urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.LocalUser], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_keys( + self, resource_group_name: str, account_name: str, username: str, **kwargs: Any + ) -> _models.LocalUserKeys: + """List SSH authorized keys and shared key of the local user. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :return: LocalUserKeys. The LocalUserKeys is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUserKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.LocalUserKeys] = kwargs.pop("cls", None) + + _request = build_local_users_list_keys_request( + resource_group_name=resource_group_name, + account_name=account_name, + username=username, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LocalUserKeys, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def regenerate_password( + self, resource_group_name: str, account_name: str, username: str, **kwargs: Any + ) -> _models.LocalUserRegeneratePasswordResult: + """Regenerate the local user SSH password. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param username: The name of local user. The username must contain lowercase letters and + numbers only. It must be unique only within the storage account. Required. + :type username: str + :return: LocalUserRegeneratePasswordResult. 
The LocalUserRegeneratePasswordResult is compatible + with MutableMapping + :rtype: ~azure.mgmt.storage.models.LocalUserRegeneratePasswordResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.LocalUserRegeneratePasswordResult] = kwargs.pop("cls", None) + + _request = build_local_users_regenerate_password_request( + resource_group_name=resource_group_name, + account_name=account_name, + username=username, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.LocalUserRegeneratePasswordResult, 
response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class TableOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`table` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any) -> _models.Table: + """Gets the table with the specified table name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :return: Table. 
The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Table] = kwargs.pop("cls", None) + + _request = build_table_get_request( + resource_group_name=resource_group_name, + account_name=account_name, + table_name=table_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Table, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload 
+ def create( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[_models.Table] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Default value is None. + :type parameters: ~azure.mgmt.storage.models.Table + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Table. The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[JSON] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Default value is None. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Table. The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[IO[bytes]] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Default value is None. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: Table. The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[Union[_models.Table, JSON, IO[bytes]]] = None, + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Is one of the following types: + Table, JSON, IO[bytes] Default value is None. + :type parameters: ~azure.mgmt.storage.models.Table or JSON or IO[bytes] + :return: Table. 
The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if parameters else None + cls: ClsType[_models.Table] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" if parameters else None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_table_create_request( + resource_group_name=resource_group_name, + account_name=account_name, + table_name=table_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): 
+ pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Table, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def update( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[_models.Table] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Default value is None. + :type parameters: ~azure.mgmt.storage.models.Table + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Table. 
The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[JSON] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Default value is None. + :type parameters: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Table. The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[IO[bytes]] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. Default value is None. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: Table. The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update( + self, + resource_group_name: str, + account_name: str, + table_name: str, + parameters: Optional[Union[_models.Table, JSON, IO[bytes]]] = None, + **kwargs: Any + ) -> _models.Table: + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. + :type table_name: str + :param parameters: The parameters to provide to create a table. 
Is one of the following types: + Table, JSON, IO[bytes] Default value is None. + :type parameters: ~azure.mgmt.storage.models.Table or JSON or IO[bytes] + :return: Table. The Table is compatible with MutableMapping + :rtype: ~azure.mgmt.storage.models.Table + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if parameters else None + cls: ClsType[_models.Table] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" if parameters else None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_table_update_request( + resource_group_name=resource_group_name, + account_name=account_name, + table_name=table_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Table, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any + ) -> None: + """Deletes the table with the specified table name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. Required. 
+ :type table_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_table_delete_request( + resource_group_name=resource_group_name, + account_name=account_name, + table_name=table_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> ItemPaged["_models.Table"]: + """Gets a list of all the tables under the specified storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :return: An iterator like instance of Table + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.Table] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Table]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_table_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def 
extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Table], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.CloudError, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class StorageTaskAssignmentInstancesReportOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`storage_task_assignment_instances_report` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + account_name: str, + storage_task_assignment_name: str, + *, + filter: Optional[str] = None, + **kwargs: Any + ) -> ItemPaged["_models.StorageTaskReportInstance"]: + """Fetch the report summary of a single storage task assignment's instances. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. Required. + :type account_name: str + :param storage_task_assignment_name: The name of the storage task assignment within the + specified resource group. Storage task assignment names must be between 3 and 24 characters in + length and use numbers and lower-case letters only. Required. + :type storage_task_assignment_name: str + :keyword filter: Optional. When specified, it can be used to query using reporting properties. + See `Constructing Filter Strings + `_ + for details. Default value is None. 
+ :paramtype filter: str + :return: An iterator like instance of StorageTaskReportInstance + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.StorageTaskReportInstance] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.StorageTaskReportInstance]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_task_assignment_instances_report_list_request( + resource_group_name=resource_group_name, + account_name=account_name, + storage_task_assignment_name=storage_task_assignment_name, + subscription_id=self._config.subscription_id, + maxpagesize=maxpagesize, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url 
= self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageTaskReportInstance], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class SkusOperations: """ + .. warning:: + **DO NOT** instantiate this class directly. - models = _models + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`skus` attribute. + """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) @@ -76,18 +19925,128 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, **kwargs: Any) -> ItemPaged["_models.Operation"]: - """Lists all of the available Storage Rest API operations. + def list(self, **kwargs: Any) -> ItemPaged["_models.SkuInformation"]: + """Lists the available SKUs supported by Microsoft.Storage for given subscription. 
- :return: An iterator like instance of either Operation or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.Operation] + :return: An iterator like instance of SkuInformation + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.SkuInformation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.SkuInformation]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_skus_list_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.SkuInformation], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class UsagesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storage.StorageManagementClient`'s + :attr:`usages` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_by_location(self, location: str, **kwargs: Any) -> ItemPaged["_models.Usage"]: + """Gets the current usage count and the limit for the resources of the location under the + subscription. + + :param location: The location name. Required. 
+ :type location: str + :return: An iterator like instance of Usage + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.Usage] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + _params = kwargs.pop("params", {}) or {} - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) + cls: ClsType[List[_models.Usage]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -100,12 +20059,19 @@ def list(self, **kwargs: Any) -> ItemPaged["_models.Operation"]: def prepare_request(next_link=None): if not next_link: - _request = build_list_request( - api_version=api_version, + _request = build_usages_list_by_location_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) - _request.url = self._client.format_url(_request.url) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) else: # make call to next link with the client's api-version @@ -120,16 +20086,24 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + return _request def extract_data(pipeline_response): - deserialized = self._deserialize("OperationListResult", 
pipeline_response) - list_of_elem = deserialized.value + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Usage], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return None, iter(list_of_elem) + return deserialized.get("nextLink") or None, iter(list_of_elem) def get_next(next_link=None): _request = prepare_request(next_link) @@ -142,7 +20116,11 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_patch.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_patch.py index 8bcb627aa475..87676c65a8f0 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_patch.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_patch.py @@ -7,9 +7,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_private_endpoint_connections_operations.py deleted file mode 100644 index 005612cbe624..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_private_endpoint_connections_operations.py +++ /dev/null @@ -1,590 +0,0 @@ -# 
pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request(resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_request( - resource_group_name: str, - account_name: str, - private_endpoint_connection_name: str, - subscription_id: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "privateEndpointConnectionName": _SERIALIZER.url( - "private_endpoint_connection_name", private_endpoint_connection_name, "str" - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_put_request( - resource_group_name: str, - account_name: str, - private_endpoint_connection_name: str, - subscription_id: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "privateEndpointConnectionName": _SERIALIZER.url( - "private_endpoint_connection_name", private_endpoint_connection_name, "str" - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - resource_group_name: str, - account_name: str, - private_endpoint_connection_name: str, - subscription_id: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - 
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "privateEndpointConnectionName": _SERIALIZER.url( - "private_endpoint_connection_name", private_endpoint_connection_name, "str" - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -class PrivateEndpointConnectionsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`private_endpoint_connections` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> ItemPaged["_models.PrivateEndpointConnection"]: - """List all the private endpoint connections associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: An iterator like instance of either PrivateEndpointConnection or the result of - cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.PrivateEndpointConnection] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.PrivateEndpointConnectionListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def 
extract_data(pipeline_response): - deserialized = self._deserialize("PrivateEndpointConnectionListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @distributed_trace - def get( - self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any - ) -> _models.PrivateEndpointConnection: - """Gets the specified private endpoint connection associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the Azure resource. Required. 
- :type private_endpoint_connection_name: str - :return: PrivateEndpointConnection or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - private_endpoint_connection_name=private_endpoint_connection_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def put( - self, - resource_group_name: str, - account_name: str, - private_endpoint_connection_name: str, - properties: 
_models.PrivateEndpointConnection, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.PrivateEndpointConnection: - """Update the state of specified private endpoint connection associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the Azure resource. Required. - :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. Required. - :type properties: ~azure.mgmt.storage.models.PrivateEndpointConnection - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: PrivateEndpointConnection or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def put( - self, - resource_group_name: str, - account_name: str, - private_endpoint_connection_name: str, - properties: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.PrivateEndpointConnection: - """Update the state of specified private endpoint connection associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. 
- Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the Azure resource. Required. - :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. Required. - :type properties: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: PrivateEndpointConnection or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def put( - self, - resource_group_name: str, - account_name: str, - private_endpoint_connection_name: str, - properties: Union[_models.PrivateEndpointConnection, IO[bytes]], - **kwargs: Any - ) -> _models.PrivateEndpointConnection: - """Update the state of specified private endpoint connection associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the Azure resource. Required. - :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. Is either a - PrivateEndpointConnection type or a IO[bytes] type. Required. 
- :type properties: ~azure.mgmt.storage.models.PrivateEndpointConnection or IO[bytes] - :return: PrivateEndpointConnection or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.PrivateEndpointConnection - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties - else: - _json = self._serialize.body(properties, "PrivateEndpointConnection") - - _request = build_put_request( - resource_group_name=resource_group_name, - account_name=account_name, - private_endpoint_connection_name=private_endpoint_connection_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any - ) -> None: - """Deletes the specified private endpoint connection associated with the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the Azure resource. Required. 
- :type private_endpoint_connection_name: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - private_endpoint_connection_name=private_endpoint_connection_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_private_link_resources_operations.py deleted file mode 100644 index 28d1381f18a0..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_private_link_resources_operations.py +++ /dev/null @@ -1,150 +0,0 
@@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_by_storage_account_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateLinkResources", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class PrivateLinkResourcesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. 
- - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`private_link_resources` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list_by_storage_account( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> _models.PrivateLinkResourceListResult: - """Gets the private link resources that need to be created for a storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: PrivateLinkResourceListResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.PrivateLinkResourceListResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None) - - _request = build_list_by_storage_account_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_queue_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_queue_operations.py deleted file mode 100644 index ea14a37f1097..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_queue_operations.py +++ 
/dev/null @@ -1,814 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_create_request( - resource_group_name: str, account_name: str, queue_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "queueName": _SERIALIZER.url( - "queue_name", - queue_name, - "str", - max_length=63, - min_length=3, - pattern=r"^[a-z0-9]([a-z0-9]|(-(?!-))){1,61}[a-z0-9]$", - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", 
content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_update_request( - resource_group_name: str, account_name: str, queue_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "queueName": _SERIALIZER.url( - "queue_name", - queue_name, - "str", - max_length=63, - min_length=3, - pattern=r"^[a-z0-9]([a-z0-9]|(-(?!-))){1,61}[a-z0-9]$", - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) - - -def 
build_get_request( - resource_group_name: str, account_name: str, queue_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "queueName": _SERIALIZER.url( - "queue_name", - queue_name, - "str", - max_length=63, - min_length=3, - pattern=r"^[a-z0-9]([a-z0-9]|(-(?!-))){1,61}[a-z0-9]$", - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - resource_group_name: str, account_name: str, queue_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", 
"application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "queueName": _SERIALIZER.url( - "queue_name", - queue_name, - "str", - max_length=63, - min_length=3, - pattern=r"^[a-z0-9]([a-z0-9]|(-(?!-))){1,61}[a-z0-9]$", - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_request( - resource_group_name: str, - account_name: str, - subscription_id: str, - *, - maxpagesize: Optional[str] = None, - filter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", 
resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if maxpagesize is not None: - _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "str") - if filter is not None: - _params["$filter"] = _SERIALIZER.query("filter", filter, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class QueueOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`queue` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - def create( - self, - resource_group_name: str, - account_name: str, - queue_name: str, - queue: _models.StorageQueue, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageQueue: - """Creates a new queue with the specified queue name, under the specified account. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :param queue: Queue properties and metadata to be created with. Required. - :type queue: ~azure.mgmt.storage.models.StorageQueue - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create( - self, - resource_group_name: str, - account_name: str, - queue_name: str, - queue: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageQueue: - """Creates a new queue with the specified queue name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :param queue: Queue properties and metadata to be created with. Required. - :type queue: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def create( - self, - resource_group_name: str, - account_name: str, - queue_name: str, - queue: Union[_models.StorageQueue, IO[bytes]], - **kwargs: Any - ) -> _models.StorageQueue: - """Creates a new queue with the specified queue name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :param queue: Queue properties and metadata to be created with. Is either a StorageQueue type - or a IO[bytes] type. Required. 
- :type queue: ~azure.mgmt.storage.models.StorageQueue or IO[bytes] - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageQueue] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(queue, (IOBase, bytes)): - _content = queue - else: - _json = self._serialize.body(queue, "StorageQueue") - - _request = build_create_request( - resource_group_name=resource_group_name, - account_name=account_name, - queue_name=queue_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageQueue", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - 
- return deserialized # type: ignore - - @overload - def update( - self, - resource_group_name: str, - account_name: str, - queue_name: str, - queue: _models.StorageQueue, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageQueue: - """Creates a new queue with the specified queue name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :param queue: Queue properties and metadata to be created with. Required. - :type queue: ~azure.mgmt.storage.models.StorageQueue - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def update( - self, - resource_group_name: str, - account_name: str, - queue_name: str, - queue: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageQueue: - """Creates a new queue with the specified queue name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :param queue: Queue properties and metadata to be created with. Required. - :type queue: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def update( - self, - resource_group_name: str, - account_name: str, - queue_name: str, - queue: Union[_models.StorageQueue, IO[bytes]], - **kwargs: Any - ) -> _models.StorageQueue: - """Creates a new queue with the specified queue name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :param queue: Queue properties and metadata to be created with. Is either a StorageQueue type - or a IO[bytes] type. Required. - :type queue: ~azure.mgmt.storage.models.StorageQueue or IO[bytes] - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageQueue] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(queue, (IOBase, bytes)): - _content = queue - else: - _json = self._serialize.body(queue, "StorageQueue") - - _request = build_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - queue_name=queue_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageQueue", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get(self, resource_group_name: str, account_name: str, queue_name: str, **kwargs: Any) -> _models.StorageQueue: - """Gets the queue with the specified queue name, under the specified account if it exists. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. 
- :type queue_name: str - :return: StorageQueue or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageQueue - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageQueue] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - queue_name=queue_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageQueue", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, account_name: str, queue_name: str, **kwargs: Any - ) -> None: - """Deletes the queue with the specified queue name, under the specified account if it exists. - - :param resource_group_name: The name of the resource group within the user's subscription. 
The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param queue_name: A queue name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, - it should begin and end with an alphanumeric character and it cannot have two consecutive - dash(-) characters. Required. - :type queue_name: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - queue_name=queue_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return 
cls(pipeline_response, None, {}) # type: ignore - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - maxpagesize: Optional[str] = None, - filter: Optional[str] = None, - **kwargs: Any - ) -> ItemPaged["_models.ListQueue"]: - """Gets a list of all the queues under the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional, a maximum number of queues that should be included in a list - queue response. Default value is None. - :type maxpagesize: str - :param filter: Optional, When specified, only the queues with a name starting with the given - filter will be listed. Default value is None. 
- :type filter: str - :return: An iterator like instance of either ListQueue or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.ListQueue] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListQueueResource] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("ListQueueResource", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def 
get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_queue_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_queue_services_operations.py deleted file mode 100644 index f0ce4e0cdb99..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_queue_services_operations.py +++ /dev/null @@ -1,430 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Literal, Optional, TypeVar, Union, overload - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request(resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, 
pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_service_properties_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - queue_service_name: Literal["default"] = kwargs.pop("queue_service_name", "default") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/{queueServiceName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "queueServiceName": _SERIALIZER.url("queue_service_name", queue_service_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # 
Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_service_properties_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - queue_service_name: Literal["default"] = kwargs.pop("queue_service_name", "default") - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/{queueServiceName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "queueServiceName": _SERIALIZER.url("queue_service_name", queue_service_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class QueueServicesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. 
- - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`queue_services` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.ListQueueServices: - """List all queue services for the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: ListQueueServices or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListQueueServices - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListQueueServices] = kwargs.pop("cls", None) - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ListQueueServices", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: _models.QueueServiceProperties, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.QueueServiceProperties: - """Sets the properties of a storage account’s Queue service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) 
rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Queue service, only properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. - :type parameters: ~azure.mgmt.storage.models.QueueServiceProperties - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: QueueServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.QueueServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.QueueServiceProperties: - """Sets the properties of a storage account’s Queue service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param parameters: The properties of a storage account’s Queue service, only properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: QueueServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.QueueServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.QueueServiceProperties, IO[bytes]], - **kwargs: Any - ) -> _models.QueueServiceProperties: - """Sets the properties of a storage account’s Queue service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Queue service, only properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Is either a - QueueServiceProperties type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.QueueServiceProperties or IO[bytes] - :return: QueueServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.QueueServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - queue_service_name: Literal["default"] = kwargs.pop("queue_service_name", "default") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.QueueServiceProperties] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "QueueServiceProperties") - - _request = build_set_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - queue_service_name=queue_service_name, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) - - deserialized = self._deserialize("QueueServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get_service_properties( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> _models.QueueServiceProperties: - """Gets the properties of a storage account’s Queue service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: QueueServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.QueueServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - queue_service_name: Literal["default"] = kwargs.pop("queue_service_name", "default") - cls: ClsType[_models.QueueServiceProperties] = kwargs.pop("cls", None) - - _request = build_get_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - queue_service_name=queue_service_name, - 
headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("QueueServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_skus_operations.py deleted file mode 100644 index 6b2c90d9bc3e..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_skus_operations.py +++ /dev/null @@ -1,155 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/skus") - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return 
HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class SkusOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`skus` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list(self, **kwargs: Any) -> ItemPaged["_models.SkuInformation"]: - """Lists the available SKUs supported by Microsoft.Storage for given subscription. - - :return: An iterator like instance of either SkuInformation or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.SkuInformation] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageSkuListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = 
self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("StorageSkuListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_accounts_operations.py deleted file mode 100644 index f88df9080618..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_accounts_operations.py +++ /dev/null @@ -1,2827 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# 
Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Iterator, Literal, Optional, TypeVar, Union, cast, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. 
import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_check_name_availability_request(subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/checkNameAvailability" - ) - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_create_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", 
_headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) - - -def build_get_properties_request( - resource_group_name: str, - account_name: str, - subscription_id: str, - *, - expand: Optional[Union[str, _models.StorageAccountExpand]] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if expand is not None: - _params["$expand"] = _SERIALIZER.query("expand", expand, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_update_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> 
HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/storageAccounts") - path_format_arguments = { - 
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_by_resource_group_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_keys_request( - resource_group_name: str, - account_name: str, - subscription_id: str, - *, - expand: Literal["kerb"] = "kerb", - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/listKeys", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if expand is not None: - _params["$expand"] = _SERIALIZER.query("expand", expand, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_regenerate_key_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/regenerateKey", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - 
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_account_sas_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListAccountSas", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = 
_url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_service_sas_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListServiceSas", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return 
HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_failover_request( - resource_group_name: str, - account_name: str, - subscription_id: str, - *, - failover_type: Literal["Planned"] = "Planned", - **kwargs: Any -) -> HttpRequest: - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/failover", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if failover_type is not None: - _params["failoverType"] = _SERIALIZER.query("failover_type", failover_type, "str") - - return HttpRequest(method="POST", url=_url, params=_params, **kwargs) - - -def build_hierarchical_namespace_migration_request( # pylint: disable=name-too-long - resource_group_name: str, account_name: str, subscription_id: str, *, request_type: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/hnsonmigration", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - _params["requestType"] = _SERIALIZER.query("request_type", request_type, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_abort_hierarchical_namespace_migration_request( # pylint: disable=name-too-long - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/aborthnsonmigration", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - 
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_customer_initiated_migration_request( # pylint: disable=name-too-long - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/startAccountMigration", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = 
_SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_customer_initiated_migration_request( # pylint: disable=name-too-long - resource_group_name: str, - account_name: str, - migration_name: Union[str, _models.MigrationName], - subscription_id: str, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/accountMigrations/{migrationName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "migrationName": _SERIALIZER.url("migration_name", migration_name, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_restore_blob_ranges_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/restoreBlobRanges", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_revoke_user_delegation_keys_request( # pylint: disable=name-too-long - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/revokeUserDelegationKeys", - ) - path_format_arguments = { - "resourceGroupName": 
_SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - return HttpRequest(method="POST", url=_url, params=_params, **kwargs) - - -class StorageAccountsOperations: # pylint: disable=too-many-public-methods - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`storage_accounts` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - def check_name_availability( - self, - account_name: _models.StorageAccountCheckNameAvailabilityParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.CheckNameAvailabilityResult: - """Checks that the storage account name is valid and is not already in use. - - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: ~azure.mgmt.storage.models.StorageAccountCheckNameAvailabilityParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: CheckNameAvailabilityResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def check_name_availability( - self, account_name: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.CheckNameAvailabilityResult: - """Checks that the storage account name is valid and is not already in use. - - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: CheckNameAvailabilityResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def check_name_availability( - self, account_name: Union[_models.StorageAccountCheckNameAvailabilityParameters, IO[bytes]], **kwargs: Any - ) -> _models.CheckNameAvailabilityResult: - """Checks that the storage account name is valid and is not already in use. - - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Is either a StorageAccountCheckNameAvailabilityParameters type or a - IO[bytes] type. Required. 
- :type account_name: ~azure.mgmt.storage.models.StorageAccountCheckNameAvailabilityParameters or - IO[bytes] - :return: CheckNameAvailabilityResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.CheckNameAvailabilityResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.CheckNameAvailabilityResult] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(account_name, (IOBase, bytes)): - _content = account_name - else: - _json = self._serialize.body(account_name, "StorageAccountCheckNameAvailabilityParameters") - - _request = build_check_name_availability_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("CheckNameAvailabilityResult", pipeline_response.http_response) - - 
if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - def _create_initial( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.StorageAccountCreateParameters, IO[bytes]], - **kwargs: Any - ) -> Iterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "StorageAccountCreateParameters") - - _request = build_create_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, 
error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def begin_create( - self, - resource_group_name: str, - account_name: str, - parameters: _models.StorageAccountCreateParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> LROPoller[_models.StorageAccount]: - """Asynchronously creates a new storage account with the specified parameters. If an account is - already created and a subsequent create request is issued with different properties, the - account properties will be updated. If an account is already created and a subsequent create or - update request is issued with the exact same set of properties, the request will succeed. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for the created account. Required. - :type parameters: ~azure.mgmt.storage.models.StorageAccountCreateParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: An instance of LROPoller that returns either StorageAccount or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageAccount] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_create( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> LROPoller[_models.StorageAccount]: - """Asynchronously creates a new storage account with the specified parameters. If an account is - already created and a subsequent create request is issued with different properties, the - account properties will be updated. If an account is already created and a subsequent create or - update request is issued with the exact same set of properties, the request will succeed. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for the created account. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: An instance of LROPoller that returns either StorageAccount or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageAccount] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_create( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.StorageAccountCreateParameters, IO[bytes]], - **kwargs: Any - ) -> LROPoller[_models.StorageAccount]: - """Asynchronously creates a new storage account with the specified parameters. If an account is - already created and a subsequent create request is issued with different properties, the - account properties will be updated. If an account is already created and a subsequent create or - update request is issued with the exact same set of properties, the request will succeed. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for the created account. Is either a - StorageAccountCreateParameters type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.StorageAccountCreateParameters or IO[bytes] - :return: An instance of LROPoller that returns either StorageAccount or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageAccount] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageAccount] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._create_initial( - resource_group_name=resource_group_name, - account_name=account_name, - parameters=parameters, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("StorageAccount", pipeline_response.http_response) - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - if polling is True: - polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[_models.StorageAccount].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - 
deserialization_callback=get_long_running_output, - ) - return LROPoller[_models.StorageAccount]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> None: - """Deletes a storage account in Microsoft Azure. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, 
response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @distributed_trace - def get_properties( - self, - resource_group_name: str, - account_name: str, - expand: Optional[Union[str, _models.StorageAccountExpand]] = None, - **kwargs: Any - ) -> _models.StorageAccount: - """Returns the properties for the specified storage account including but not limited to name, SKU - name, location, and account status. The ListKeys operation should be used to retrieve storage - keys. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param expand: May be used to expand the properties within account's properties. By default, - data is not included when fetching properties. Currently we only support geoReplicationStats - and blobRestoreStatus. Known values are: "geoReplicationStats" and "blobRestoreStatus". Default - value is None. 
- :type expand: str or ~azure.mgmt.storage.models.StorageAccountExpand - :return: StorageAccount or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccount - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageAccount] = kwargs.pop("cls", None) - - _request = build_get_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - expand=expand, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageAccount", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def update( - self, - resource_group_name: str, - account_name: str, - parameters: _models.StorageAccountUpdateParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageAccount: - """The update operation can be used to update the SKU, encryption, access tier, or tags for a - storage 
account. It can also be used to map the account to a custom domain. Only one custom - domain is supported per storage account; the replacement/change of custom domain is not - supported. In order to replace an old custom domain, the old value must be cleared/unregistered - before a new value can be set. The update of multiple properties is supported. This call does - not change the storage keys for the account. If you want to change the storage account keys, - use the regenerate keys operation. The location and name of the storage account cannot be - changed after creation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for the updated account. Required. - :type parameters: ~azure.mgmt.storage.models.StorageAccountUpdateParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageAccount or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccount - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def update( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageAccount: - """The update operation can be used to update the SKU, encryption, access tier, or tags for a - storage account. It can also be used to map the account to a custom domain. Only one custom - domain is supported per storage account; the replacement/change of custom domain is not - supported. 
In order to replace an old custom domain, the old value must be cleared/unregistered - before a new value can be set. The update of multiple properties is supported. This call does - not change the storage keys for the account. If you want to change the storage account keys, - use the regenerate keys operation. The location and name of the storage account cannot be - changed after creation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for the updated account. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageAccount or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccount - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def update( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.StorageAccountUpdateParameters, IO[bytes]], - **kwargs: Any - ) -> _models.StorageAccount: - """The update operation can be used to update the SKU, encryption, access tier, or tags for a - storage account. It can also be used to map the account to a custom domain. Only one custom - domain is supported per storage account; the replacement/change of custom domain is not - supported. In order to replace an old custom domain, the old value must be cleared/unregistered - before a new value can be set. The update of multiple properties is supported. This call does - not change the storage keys for the account. 
If you want to change the storage account keys, - use the regenerate keys operation. The location and name of the storage account cannot be - changed after creation. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for the updated account. Is either a - StorageAccountUpdateParameters type or a IO[bytes] type. Required. - :type parameters: ~azure.mgmt.storage.models.StorageAccountUpdateParameters or IO[bytes] - :return: StorageAccount or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccount - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageAccount] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "StorageAccountUpdateParameters") - - _request = build_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - 
subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageAccount", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def list(self, **kwargs: Any) -> ItemPaged["_models.StorageAccount"]: - """Lists all the storage accounts available under the subscription. Note that storage keys are not - returned; use the ListKeys operation for this. 
- - :return: An iterator like instance of either StorageAccount or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.StorageAccount] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageAccountListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("StorageAccountListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: 
PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @distributed_trace - def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.StorageAccount"]: - """Lists all the storage accounts available under the given resource group. Note that storage keys - are not returned; use the ListKeys operation for this. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :return: An iterator like instance of either StorageAccount or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.StorageAccount] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageAccountListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_by_resource_group_request( - resource_group_name=resource_group_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make 
call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("StorageAccountListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @distributed_trace - def list_keys( - self, resource_group_name: str, account_name: str, expand: Literal["kerb"] = "kerb", **kwargs: Any - ) -> _models.StorageAccountListKeysResult: - """Lists the access keys or Kerberos keys (if active directory enabled) for the specified storage - account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. 
- Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param expand: Specifies type of the key to be listed. Possible value is kerb. Known values are - "kerb" and None. Default value is "kerb". - :type expand: str - :return: StorageAccountListKeysResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageAccountListKeysResult] = kwargs.pop("cls", None) - - _request = build_list_keys_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - expand=expand, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageAccountListKeysResult", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def regenerate_key( - self, - 
resource_group_name: str, - account_name: str, - regenerate_key: _models.StorageAccountRegenerateKeyParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageAccountListKeysResult: - """Regenerates one of the access keys or Kerberos keys for the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, - kerb1, kerb2. Required. - :type regenerate_key: ~azure.mgmt.storage.models.StorageAccountRegenerateKeyParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageAccountListKeysResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def regenerate_key( - self, - resource_group_name: str, - account_name: str, - regenerate_key: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.StorageAccountListKeysResult: - """Regenerates one of the access keys or Kerberos keys for the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. 
Required. - :type account_name: str - :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, - kerb1, kerb2. Required. - :type regenerate_key: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: StorageAccountListKeysResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def regenerate_key( - self, - resource_group_name: str, - account_name: str, - regenerate_key: Union[_models.StorageAccountRegenerateKeyParameters, IO[bytes]], - **kwargs: Any - ) -> _models.StorageAccountListKeysResult: - """Regenerates one of the access keys or Kerberos keys for the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, - kerb1, kerb2. Is either a StorageAccountRegenerateKeyParameters type or a IO[bytes] type. - Required. 
- :type regenerate_key: ~azure.mgmt.storage.models.StorageAccountRegenerateKeyParameters or - IO[bytes] - :return: StorageAccountListKeysResult or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccountListKeysResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageAccountListKeysResult] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(regenerate_key, (IOBase, bytes)): - _content = regenerate_key - else: - _json = self._serialize.body(regenerate_key, "StorageAccountRegenerateKeyParameters") - - _request = build_regenerate_key_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = 
self._deserialize("StorageAccountListKeysResult", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def list_account_sas( - self, - resource_group_name: str, - account_name: str, - parameters: _models.AccountSasParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ListAccountSasResponse: - """List SAS credentials of a storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide to list SAS credentials for the storage account. - Required. - :type parameters: ~azure.mgmt.storage.models.AccountSasParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: ListAccountSasResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def list_account_sas( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ListAccountSasResponse: - """List SAS credentials of a storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. 
- Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide to list SAS credentials for the storage account. - Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: ListAccountSasResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def list_account_sas( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.AccountSasParameters, IO[bytes]], - **kwargs: Any - ) -> _models.ListAccountSasResponse: - """List SAS credentials of a storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide to list SAS credentials for the storage account. - Is either a AccountSasParameters type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.AccountSasParameters or IO[bytes] - :return: ListAccountSasResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListAccountSasResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.ListAccountSasResponse] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "AccountSasParameters") - - _request = build_list_account_sas_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ListAccountSasResponse", pipeline_response.http_response) - - if cls: - 
return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def list_service_sas( - self, - resource_group_name: str, - account_name: str, - parameters: _models.ServiceSasParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ListServiceSasResponse: - """List service SAS credentials of a specific resource. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide to list service SAS credentials. Required. - :type parameters: ~azure.mgmt.storage.models.ServiceSasParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: ListServiceSasResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def list_service_sas( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.ListServiceSasResponse: - """List service SAS credentials of a specific resource. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param parameters: The parameters to provide to list service SAS credentials. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: ListServiceSasResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def list_service_sas( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.ServiceSasParameters, IO[bytes]], - **kwargs: Any - ) -> _models.ListServiceSasResponse: - """List service SAS credentials of a specific resource. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide to list service SAS credentials. Is either a - ServiceSasParameters type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.ServiceSasParameters or IO[bytes] - :return: ListServiceSasResponse or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListServiceSasResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.ListServiceSasResponse] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "ServiceSasParameters") - - _request = build_list_service_sas_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ListServiceSasResponse", pipeline_response.http_response) - - if cls: - 
return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - def _failover_initial( - self, resource_group_name: str, account_name: str, failover_type: Literal["Planned"] = "Planned", **kwargs: Any - ) -> Iterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - _request = build_failover_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - failover_type=failover_type, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def begin_failover( - self, resource_group_name: str, account_name: str, failover_type: 
Literal["Planned"] = "Planned", **kwargs: Any - ) -> LROPoller[None]: - """A failover request can be triggered for a storage account in the event a primary endpoint - becomes unavailable for any reason. The failover occurs from the storage account's primary - cluster to the secondary cluster for RA-GRS accounts. The secondary cluster will become primary - after failover and the account is converted to LRS. In the case of a Planned Failover, the - primary and secondary clusters are swapped after failover and the account remains - geo-replicated. Failover should continue to be used in the event of availability issues as - Planned failover is only available while the primary and secondary endpoints are available. The - primary use case of a Planned Failover is disaster recovery testing drills. This type of - failover is invoked by setting FailoverType parameter to 'Planned'. Learn more about the - failover options here- - https://learn.microsoft.com/azure/storage/common/storage-disaster-recovery-guidance. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param failover_type: The parameter is set to 'Planned' to indicate whether a Planned failover - is requested. Known values are "Planned" and None. Default value is "Planned". 
- :type failover_type: str - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._failover_initial( - resource_group_name=resource_group_name, - account_name=account_name, - failover_type=failover_type, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - def _hierarchical_namespace_migration_initial( # pylint: disable=name-too-long - self, resource_group_name: str, account_name: str, request_type: str, **kwargs: Any - ) -> 
Iterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - _request = build_hierarchical_namespace_migration_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - request_type=request_type, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def begin_hierarchical_namespace_migration( - self, resource_group_name: str, account_name: str, request_type: str, **kwargs: Any - ) -> LROPoller[None]: - """Live Migration of storage account to enable Hns. 
- - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param request_type: Required. Hierarchical namespace migration type can either be a - hierarchical namespace validation request 'HnsOnValidationRequest' or a hydration request - 'HnsOnHydrationRequest'. The validation request will validate the migration whereas the - hydration request will migrate the account. Required. - :type request_type: str - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._hierarchical_namespace_migration_initial( - resource_group_name=resource_group_name, - account_name=account_name, - request_type=request_type, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is 
True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - def _abort_hierarchical_namespace_migration_initial( # pylint: disable=name-too-long - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> Iterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - _request = build_abort_hierarchical_namespace_migration_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - 
map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponse, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def begin_abort_hierarchical_namespace_migration( # pylint: disable=name-too-long - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> LROPoller[None]: - """Abort live Migration of storage account to enable Hns. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._abort_hierarchical_namespace_migration_initial( - resource_group_name=resource_group_name, - account_name=account_name, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - def _customer_initiated_migration_initial( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.StorageAccountMigration, IO[bytes]], - **kwargs: Any - ) 
-> Iterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "StorageAccountMigration") - - _request = build_customer_initiated_migration_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code 
== 202: - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @overload - def begin_customer_initiated_migration( - self, - resource_group_name: str, - account_name: str, - parameters: _models.StorageAccountMigration, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> LROPoller[None]: - """Account Migration request can be triggered for a storage account to change its redundancy - level. The migration updates the non-zonal redundant storage account to a zonal redundant - account or vice-versa in order to have better reliability and availability. Zone-redundant - storage (ZRS) replicates your storage account synchronously across three Azure availability - zones in the primary region. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The request parameters required to perform storage account migration. - Required. - :type parameters: ~azure.mgmt.storage.models.StorageAccountMigration - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_customer_initiated_migration( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> LROPoller[None]: - """Account Migration request can be triggered for a storage account to change its redundancy - level. The migration updates the non-zonal redundant storage account to a zonal redundant - account or vice-versa in order to have better reliability and availability. Zone-redundant - storage (ZRS) replicates your storage account synchronously across three Azure availability - zones in the primary region. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The request parameters required to perform storage account migration. - Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_customer_initiated_migration( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.StorageAccountMigration, IO[bytes]], - **kwargs: Any - ) -> LROPoller[None]: - """Account Migration request can be triggered for a storage account to change its redundancy - level. The migration updates the non-zonal redundant storage account to a zonal redundant - account or vice-versa in order to have better reliability and availability. Zone-redundant - storage (ZRS) replicates your storage account synchronously across three Azure availability - zones in the primary region. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The request parameters required to perform storage account migration. Is - either a StorageAccountMigration type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.StorageAccountMigration or IO[bytes] - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._customer_initiated_migration_initial( - resource_group_name=resource_group_name, - account_name=account_name, - parameters=parameters, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[None](self._client, raw_result, get_long_running_output, 
polling_method) # type: ignore - - @distributed_trace - def get_customer_initiated_migration( - self, - resource_group_name: str, - account_name: str, - migration_name: Union[str, _models.MigrationName], - **kwargs: Any - ) -> _models.StorageAccountMigration: - """Gets the status of the ongoing migration for the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param migration_name: The name of the Storage Account Migration. It should always be - 'default'. "default" Required. - :type migration_name: str or ~azure.mgmt.storage.models.MigrationName - :return: StorageAccountMigration or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageAccountMigration - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageAccountMigration] = kwargs.pop("cls", None) - - _request = build_get_customer_initiated_migration_request( - resource_group_name=resource_group_name, - account_name=account_name, - migration_name=migration_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False 
- pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageAccountMigration", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - def _restore_blob_ranges_initial( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.BlobRestoreParameters, IO[bytes]], - **kwargs: Any - ) -> Iterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "BlobRestoreParameters") - - _request = build_restore_blob_ranges_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - 
json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def begin_restore_blob_ranges( - self, - resource_group_name: str, - account_name: str, - parameters: _models.BlobRestoreParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> LROPoller[_models.BlobRestoreStatus]: - """Restore blobs in the specified blob ranges. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for restore blob ranges. Required. - :type parameters: ~azure.mgmt.storage.models.BlobRestoreParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: An instance of LROPoller that returns either BlobRestoreStatus or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_restore_blob_ranges( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> LROPoller[_models.BlobRestoreStatus]: - """Restore blobs in the specified blob ranges. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for restore blob ranges. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of LROPoller that returns either BlobRestoreStatus or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_restore_blob_ranges( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.BlobRestoreParameters, IO[bytes]], - **kwargs: Any - ) -> LROPoller[_models.BlobRestoreStatus]: - """Restore blobs in the specified blob ranges. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The parameters to provide for restore blob ranges. Is either a - BlobRestoreParameters type or a IO[bytes] type. Required. - :type parameters: ~azure.mgmt.storage.models.BlobRestoreParameters or IO[bytes] - :return: An instance of LROPoller that returns either BlobRestoreStatus or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.BlobRestoreStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BlobRestoreStatus] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._restore_blob_ranges_initial( - resource_group_name=resource_group_name, - account_name=account_name, - parameters=parameters, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("BlobRestoreStatus", pipeline_response.http_response) - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - if 
polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[_models.BlobRestoreStatus].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[_models.BlobRestoreStatus]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace - def revoke_user_delegation_keys( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> None: - """Revoke user delegation keys. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_revoke_user_delegation_keys_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_task_assignment_instances_report_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_task_assignment_instances_report_operations.py deleted file mode 100644 index 91a23e1a5304..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_task_assignment_instances_report_operations.py +++ /dev/null @@ -1,223 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request( - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - subscription_id: str, - *, - maxpagesize: Optional[int] = None, - filter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments/{storageTaskAssignmentName}/reports", - ) - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "storageTaskAssignmentName": _SERIALIZER.url( - "storage_task_assignment_name", - storage_task_assignment_name, - "str", - max_length=24, - min_length=3, - pattern=r"^[a-z][a-z0-9]{2,23}$", - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if maxpagesize is not None: - _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") - if filter is not None: - _params["$filter"] = 
_SERIALIZER.query("filter", filter, "str") - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class StorageTaskAssignmentInstancesReportOperations: # pylint: disable=name-too-long - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`storage_task_assignment_instances_report` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - maxpagesize: Optional[int] = None, - filter: Optional[str] = None, - **kwargs: Any - ) -> ItemPaged["_models.StorageTaskReportInstance"]: - """Fetch the report summary of a single storage task assignment's instances. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. 
Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param maxpagesize: Optional, specifies the maximum number of storage task assignment instances - to be included in the list response. Default value is None. - :type maxpagesize: int - :param filter: Optional. When specified, it can be used to query using reporting properties. - See `Constructing Filter Strings - `_ - for details. Default value is None. - :type filter: str - :return: An iterator like instance of either StorageTaskReportInstance or the result of - cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.StorageTaskReportInstance] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageTaskReportSummary] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value 
in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("StorageTaskReportSummary", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_task_assignments_instances_report_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_task_assignments_instances_report_operations.py deleted file mode 100644 index eed7a1111189..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_task_assignments_instances_report_operations.py +++ /dev/null @@ -1,208 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request( - resource_group_name: str, - account_name: str, - subscription_id: str, - *, - maxpagesize: Optional[int] = None, - filter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/reports", - ) - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if maxpagesize is not None: - _params["$maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") - if filter is not None: - _params["$filter"] = _SERIALIZER.query("filter", filter, "str") - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class StorageTaskAssignmentsInstancesReportOperations: # pylint: disable=name-too-long - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`storage_task_assignments_instances_report` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, - resource_group_name: str, - account_name: str, - maxpagesize: Optional[int] = None, - filter: Optional[str] = None, - **kwargs: Any - ) -> ItemPaged["_models.StorageTaskReportInstance"]: - """Fetch the report summary of all the storage task assignments and instances in an account. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param maxpagesize: Optional, specifies the maximum number of storage task assignment instances - to be included in the list response. Default value is None. - :type maxpagesize: int - :param filter: Optional. When specified, it can be used to query using reporting properties. - See `Constructing Filter Strings - `_ - for details. Default value is None. 
- :type filter: str - :return: An iterator like instance of either StorageTaskReportInstance or the result of - cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.StorageTaskReportInstance] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageTaskReportSummary] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - maxpagesize=maxpagesize, - filter=filter, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("StorageTaskReportSummary", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return 
deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_task_assignments_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_task_assignments_operations.py deleted file mode 100644 index c7d1557659e0..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_storage_task_assignments_operations.py +++ /dev/null @@ -1,1036 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Iterator, Optional, TypeVar, Union, cast, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_create_request( - resource_group_name: str, account_name: str, storage_task_assignment_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments/{storageTaskAssignmentName}", - ) - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "storageTaskAssignmentName": _SERIALIZER.url( - "storage_task_assignment_name", - storage_task_assignment_name, - "str", - max_length=24, - min_length=3, - pattern=r"^[a-z][a-z0-9]{2,23}$", - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_update_request( - resource_group_name: str, account_name: str, storage_task_assignment_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments/{storageTaskAssignmentName}", - ) - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "storageTaskAssignmentName": _SERIALIZER.url( - "storage_task_assignment_name", - storage_task_assignment_name, - "str", - max_length=24, - min_length=3, - pattern=r"^[a-z][a-z0-9]{2,23}$", - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_request( - resource_group_name: str, account_name: str, storage_task_assignment_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments/{storageTaskAssignmentName}", - ) - path_format_arguments = { - "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "storageTaskAssignmentName": _SERIALIZER.url( - "storage_task_assignment_name", - storage_task_assignment_name, - "str", - max_length=24, - min_length=3, - pattern=r"^[a-z][a-z0-9]{2,23}$", - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - resource_group_name: str, account_name: str, storage_task_assignment_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments/{storageTaskAssignmentName}", - ) - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "storageTaskAssignmentName": 
_SERIALIZER.url( - "storage_task_assignment_name", - storage_task_assignment_name, - "str", - max_length=24, - min_length=3, - pattern=r"^[a-z][a-z0-9]{2,23}$", - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_request( - resource_group_name: str, account_name: str, subscription_id: str, *, top: Optional[int] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/storageTaskAssignments", - ) - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if top is not None: - _params["$top"] = _SERIALIZER.query("top", top, "int") - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, 
**kwargs) - - -class StorageTaskAssignmentsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`storage_task_assignments` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - def _create_initial( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: Union[_models.StorageTaskAssignment, IO[bytes]], - **kwargs: Any - ) -> Iterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "StorageTaskAssignment") - - _request = build_create_request( - resource_group_name=resource_group_name, - account_name=account_name, - 
storage_task_assignment_name=storage_task_assignment_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 201, 202]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 202: - response_headers["Azure-AsyncOperation"] = self._deserialize( - "str", response.headers.get("Azure-AsyncOperation") - ) - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @overload - def begin_create( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: _models.StorageTaskAssignment, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> LROPoller[_models.StorageTaskAssignment]: - """Asynchronously creates a new storage task assignment sub-resource with the specified - parameters. 
If a storage task assignment is already created and a subsequent create request is - issued with different properties, the storage task assignment properties will be updated. If a - storage task assignment is already created and a subsequent create or update request is issued - with the exact same set of properties, the request will succeed. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param parameters: The parameters to create a Storage Task Assignment. Required. - :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignment - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of LROPoller that returns either StorageTaskAssignment or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_create( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> LROPoller[_models.StorageTaskAssignment]: - """Asynchronously creates a new storage task assignment sub-resource with the specified - parameters. 
If a storage task assignment is already created and a subsequent create request is - issued with different properties, the storage task assignment properties will be updated. If a - storage task assignment is already created and a subsequent create or update request is issued - with the exact same set of properties, the request will succeed. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param parameters: The parameters to create a Storage Task Assignment. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of LROPoller that returns either StorageTaskAssignment or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_create( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: Union[_models.StorageTaskAssignment, IO[bytes]], - **kwargs: Any - ) -> LROPoller[_models.StorageTaskAssignment]: - """Asynchronously creates a new storage task assignment sub-resource with the specified - parameters. 
If a storage task assignment is already created and a subsequent create request is - issued with different properties, the storage task assignment properties will be updated. If a - storage task assignment is already created and a subsequent create or update request is issued - with the exact same set of properties, the request will succeed. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param parameters: The parameters to create a Storage Task Assignment. Is either a - StorageTaskAssignment type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignment or IO[bytes] - :return: An instance of LROPoller that returns either StorageTaskAssignment or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageTaskAssignment] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._create_initial( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - parameters=parameters, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("StorageTaskAssignment", pipeline_response.http_response) - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return 
LROPoller[_models.StorageTaskAssignment].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[_models.StorageTaskAssignment]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - def _update_initial( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: Union[_models.StorageTaskAssignmentUpdateParameters, IO[bytes]], - **kwargs: Any - ) -> Iterator[bytes]: - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "StorageTaskAssignmentUpdateParameters") - - _request = build_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 202: - response_headers["Azure-AsyncOperation"] = self._deserialize( - "str", response.headers.get("Azure-AsyncOperation") - ) - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @overload - def begin_update( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: _models.StorageTaskAssignmentUpdateParameters, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> LROPoller[_models.StorageTaskAssignment]: - """Update storage task assignment properties. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. 
Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param parameters: The parameters to update a Storage Task Assignment. Required. - :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateParameters - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of LROPoller that returns either StorageTaskAssignment or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_update( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> LROPoller[_models.StorageTaskAssignment]: - """Update storage task assignment properties. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param parameters: The parameters to update a Storage Task Assignment. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: An instance of LROPoller that returns either StorageTaskAssignment or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_update( - self, - resource_group_name: str, - account_name: str, - storage_task_assignment_name: str, - parameters: Union[_models.StorageTaskAssignmentUpdateParameters, IO[bytes]], - **kwargs: Any - ) -> LROPoller[_models.StorageTaskAssignment]: - """Update storage task assignment properties. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :param parameters: The parameters to update a Storage Task Assignment. Is either a - StorageTaskAssignmentUpdateParameters type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.StorageTaskAssignmentUpdateParameters or IO[bytes] - :return: An instance of LROPoller that returns either StorageTaskAssignment or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StorageTaskAssignment] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._update_initial( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - parameters=parameters, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("StorageTaskAssignment", pipeline_response.http_response) - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return 
LROPoller[_models.StorageTaskAssignment].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[_models.StorageTaskAssignment]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace - def get( - self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any - ) -> _models.StorageTaskAssignment: - """Get the storage task assignment properties. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. 
- :type storage_task_assignment_name: str - :return: StorageTaskAssignment or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.StorageTaskAssignment - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageTaskAssignment] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("StorageTaskAssignment", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - def _delete_initial( - self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any - ) -> Iterator[bytes]: - error_map: MutableMapping = { - 
401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202, 204]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 202: - response_headers["Azure-AsyncOperation"] = self._deserialize( - "str", response.headers.get("Azure-AsyncOperation") - ) - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # 
type: ignore - - @distributed_trace - def begin_delete( - self, resource_group_name: str, account_name: str, storage_task_assignment_name: str, **kwargs: Any - ) -> LROPoller[None]: - """Delete the storage task assignment sub-resource. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param storage_task_assignment_name: The name of the storage task assignment within the - specified resource group. Storage task assignment names must be between 3 and 24 characters in - length and use numbers and lower-case letters only. Required. - :type storage_task_assignment_name: str - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._delete_initial( - resource_group_name=resource_group_name, - account_name=account_name, - storage_task_assignment_name=storage_task_assignment_name, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def 
get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[None].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - @distributed_trace - def list( - self, resource_group_name: str, account_name: str, top: Optional[int] = None, **kwargs: Any - ) -> ItemPaged["_models.StorageTaskAssignment"]: - """List all the storage task assignments in an account. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param top: Optional, specifies the maximum number of storage task assignment Ids to be - included in the list response. Default value is None. 
- :type top: int - :return: An iterator like instance of either StorageTaskAssignment or the result of - cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.StorageTaskAssignment] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.StorageTaskAssignmentsList] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - top=top, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("StorageTaskAssignmentsList", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - 
- def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.ErrorResponseAutoGenerated, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_table_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_table_operations.py deleted file mode 100644 index 2477302a37e0..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_table_operations.py +++ /dev/null @@ -1,767 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Optional, TypeVar, Union, overload -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_create_request( - resource_group_name: str, account_name: str, table_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", 
resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "tableName": _SERIALIZER.url( - "table_name", table_name, "str", max_length=63, min_length=3, pattern=r"^[A-Za-z][A-Za-z0-9]{2,62}$" - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_update_request( - resource_group_name: str, account_name: str, table_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), 
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "tableName": _SERIALIZER.url( - "table_name", table_name, "str", max_length=63, min_length=3, pattern=r"^[A-Za-z][A-Za-z0-9]{2,62}$" - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_request( - resource_group_name: str, account_name: str, table_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "tableName": _SERIALIZER.url( - "table_name", table_name, "str", max_length=63, min_length=3, pattern=r"^[A-Za-z][A-Za-z0-9]{2,62}$" - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # 
Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - resource_group_name: str, account_name: str, table_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "tableName": _SERIALIZER.url( - "table_name", table_name, "str", max_length=63, min_length=3, pattern=r"^[A-Za-z][A-Za-z0-9]{2,62}$" - ), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_list_request(resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class TableOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`table` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - def create( - self, - resource_group_name: str, - account_name: str, - table_name: str, - parameters: Optional[_models.Table] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.Table: - """Creates a new table with the specified table name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :param parameters: The parameters to provide to create a table. Default value is None. - :type parameters: ~azure.mgmt.storage.models.Table - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create( - self, - resource_group_name: str, - account_name: str, - table_name: str, - parameters: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.Table: - """Creates a new table with the specified table name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :param parameters: The parameters to provide to create a table. Default value is None. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def create( - self, - resource_group_name: str, - account_name: str, - table_name: str, - parameters: Optional[Union[_models.Table, IO[bytes]]] = None, - **kwargs: Any - ) -> _models.Table: - """Creates a new table with the specified table name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. 
The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :param parameters: The parameters to provide to create a table. Is either a Table type or a - IO[bytes] type. Default value is None. - :type parameters: ~azure.mgmt.storage.models.Table or IO[bytes] - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - content_type = content_type if parameters else None - cls: ClsType[_models.Table] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if parameters else None - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - if parameters is not None: - _json = self._serialize.body(parameters, "Table") - else: - _json = None - - _request = build_create_request( - resource_group_name=resource_group_name, - account_name=account_name, - 
table_name=table_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("Table", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def update( - self, - resource_group_name: str, - account_name: str, - table_name: str, - parameters: Optional[_models.Table] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.Table: - """Creates a new table with the specified table name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :param parameters: The parameters to provide to create a table. Default value is None. 
- :type parameters: ~azure.mgmt.storage.models.Table - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def update( - self, - resource_group_name: str, - account_name: str, - table_name: str, - parameters: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.Table: - """Creates a new table with the specified table name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :param parameters: The parameters to provide to create a table. Default value is None. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def update( - self, - resource_group_name: str, - account_name: str, - table_name: str, - parameters: Optional[Union[_models.Table, IO[bytes]]] = None, - **kwargs: Any - ) -> _models.Table: - """Creates a new table with the specified table name, under the specified account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :param parameters: The parameters to provide to create a table. Is either a Table type or a - IO[bytes] type. Default value is None. 
- :type parameters: ~azure.mgmt.storage.models.Table or IO[bytes] - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - content_type = content_type if parameters else None - cls: ClsType[_models.Table] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if parameters else None - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - if parameters is not None: - _json = self._serialize.body(parameters, "Table") - else: - _json = None - - _request = build_update_request( - resource_group_name=resource_group_name, - account_name=account_name, - table_name=table_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("Table", 
pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get(self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any) -> _models.Table: - """Gets the table with the specified table name, under the specified account if it exists. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. 
- :type table_name: str - :return: Table or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.Table - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.Table] = kwargs.pop("cls", None) - - _request = build_get_request( - resource_group_name=resource_group_name, - account_name=account_name, - table_name=table_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("Table", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any - ) -> None: - """Deletes the table with the specified table name, under the specified account if it exists. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. 
- :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param table_name: A table name must be unique within a storage account and must be between 3 - and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin - with a numeric character. Required. - :type table_name: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_delete_request( - resource_group_name=resource_group_name, - account_name=account_name, - table_name=table_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @distributed_trace - def list(self, resource_group_name: str, account_name: 
str, **kwargs: Any) -> ItemPaged["_models.Table"]: - """Gets a list of all the tables under the specified storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: An iterator like instance of either Table or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.Table] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListTableResource] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("ListTableResource", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_table_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_table_services_operations.py deleted file mode 100644 index 8c756b707ddc..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_table_services_operations.py +++ /dev/null @@ -1,430 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from io import IOBase -from typing import Any, Callable, IO, Literal, Optional, TypeVar, Union, overload - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request(resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, 
pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_service_properties_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - table_service_name: Literal["default"] = kwargs.pop("table_service_name", "default") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/{tableServiceName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "tableServiceName": _SERIALIZER.url("table_service_name", table_service_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # 
Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_service_properties_request( - resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - table_service_name: Literal["default"] = kwargs.pop("table_service_name", "default") - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/{tableServiceName}", - ) - path_format_arguments = { - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "accountName": _SERIALIZER.url( - "account_name", account_name, "str", max_length=24, min_length=3, pattern=r"^[a-z0-9]+$" - ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "tableServiceName": _SERIALIZER.url("table_service_name", table_service_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class TableServicesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. 
- - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`table_services` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.ListTableServices: - """List all table services for the storage account. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :return: ListTableServices or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.ListTableServices - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListTableServices] = kwargs.pop("cls", None) - - _request = build_list_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ListTableServices", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @overload - def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: _models.TableServiceProperties, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.TableServiceProperties: - """Sets the properties of a storage account’s Table service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) 
rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Table service, only properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. - :type parameters: ~azure.mgmt.storage.models.TableServiceProperties - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: TableServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.TableServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.TableServiceProperties: - """Sets the properties of a storage account’s Table service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. 
- :type account_name: str - :param parameters: The properties of a storage account’s Table service, only properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required. - :type parameters: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: TableServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.TableServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def set_service_properties( - self, - resource_group_name: str, - account_name: str, - parameters: Union[_models.TableServiceProperties, IO[bytes]], - **kwargs: Any - ) -> _models.TableServiceProperties: - """Sets the properties of a storage account’s Table service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :param parameters: The properties of a storage account’s Table service, only properties for - Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Is either a - TableServiceProperties type or a IO[bytes] type. Required. 
- :type parameters: ~azure.mgmt.storage.models.TableServiceProperties or IO[bytes] - :return: TableServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.TableServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - table_service_name: Literal["default"] = kwargs.pop("table_service_name", "default") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.TableServiceProperties] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "TableServiceProperties") - - _request = build_set_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - table_service_name=table_service_name, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) - - deserialized = self._deserialize("TableServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get_service_properties( - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> _models.TableServiceProperties: - """Gets the properties of a storage account’s Table service, including properties for Storage - Analytics and CORS (Cross-Origin Resource Sharing) rules. - - :param resource_group_name: The name of the resource group within the user's subscription. The - name is case insensitive. Required. - :type resource_group_name: str - :param account_name: The name of the storage account within the specified resource group. - Storage account names must be between 3 and 24 characters in length and use numbers and - lower-case letters only. Required. - :type account_name: str - :return: TableServiceProperties or the result of cls(response) - :rtype: ~azure.mgmt.storage.models.TableServiceProperties - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - table_service_name: Literal["default"] = kwargs.pop("table_service_name", "default") - cls: ClsType[_models.TableServiceProperties] = kwargs.pop("cls", None) - - _request = build_get_service_properties_request( - resource_group_name=resource_group_name, - account_name=account_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - table_service_name=table_service_name, - 
headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize("TableServiceProperties", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_usages_operations.py deleted file mode 100644 index 185d942c2006..000000000000 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/operations/_usages_operations.py +++ /dev/null @@ -1,162 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Optional, TypeVar -import urllib.parse - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._configuration import StorageManagementClientConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_by_location_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/locations/{location}/usages" - ) - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "location": _SERIALIZER.url("location", location, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, 
"str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class UsagesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.storage.StorageManagementClient`'s - :attr:`usages` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: StorageManagementClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list_by_location(self, location: str, **kwargs: Any) -> ItemPaged["_models.Usage"]: - """Gets the current usage count and the limit for the resources of the location under the - subscription. - - :param location: The location of the Azure Storage resource. Required. 
- :type location: str - :return: An iterator like instance of either Usage or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.models.Usage] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.UsageListResult] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_by_location_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("UsageListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: 
PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_clear_legal_hold.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_clear_legal_hold.py index d6874416fe0d..78995e6e0dfb 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_clear_legal_hold.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_clear_legal_hold.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,18 +27,18 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.clear_legal_hold( resource_group_name="res4303", account_name="sto7280", container_name="container8723", - legal_hold={"tags": ["str"], "allowProtectedAppendWritesAll": bool, "hasLegalHold": bool}, + legal_hold={"tags": ["tag1", "tag2", "tag3"]}, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersClearLegalHold.json +# x-ms-original-file: 2025-08-01/BlobContainersClearLegalHold.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_delete.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_delete.py index b879509f4747..96107df43fe7 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_delete.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_delete.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.blob_containers.delete( @@ -38,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersDelete.json +# x-ms-original-file: 2025-08-01/BlobContainersDelete.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_delete_immutability_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_delete_immutability_policy.py index 90c62f051f9b..8bf6d942c7ba 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_delete_immutability_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_delete_immutability_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,18 +27,19 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.delete_immutability_policy( resource_group_name="res1581", account_name="sto9621", container_name="container4910", - if_match="8d59f81a7fa7be0", + etag="str", + match_condition=~azure.core.MatchConditions, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersDeleteImmutabilityPolicy.json +# x-ms-original-file: 2025-08-01/BlobContainersDeleteImmutabilityPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_extend_immutability_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_extend_immutability_policy.py index f9c917f7fe35..1899619d6c94 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_extend_immutability_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_extend_immutability_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,18 +27,19 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.extend_immutability_policy( resource_group_name="res6238", account_name="sto232", container_name="container5023", - if_match="8d59f830d0c3bf9", + etag="str", + match_condition=~azure.core.MatchConditions, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersExtendImmutabilityPolicy.json +# x-ms-original-file: 2025-08-01/BlobContainersExtendImmutabilityPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_get.py index 1af85875297a..f2b79d2b417c 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_get.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_get.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersGet.json +# x-ms-original-file: 2025-08-01/BlobContainersGet.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_get_immutability_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_get_immutability_policy.py index daae04e76992..fb86e8c863ee 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_get_immutability_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_get_immutability_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.get_immutability_policy( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersGetImmutabilityPolicy.json +# x-ms-original-file: 2025-08-01/BlobContainersGetImmutabilityPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_get_with_allow_protected_append_writes_all.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_get_with_allow_protected_append_writes_all.py index 71d00bb2b968..5cf78172cad8 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_get_with_allow_protected_append_writes_all.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_get_with_allow_protected_append_writes_all.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersGetWithAllowProtectedAppendWritesAll.json +# x-ms-original-file: 2025-08-01/BlobContainersGetWithAllowProtectedAppendWritesAll.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_lease_acquire.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_lease_acquire.py index 228a43a5db34..16e4e2e7b0a8 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_lease_acquire.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_lease_acquire.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.lease( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersLease_Acquire.json +# x-ms-original-file: 2025-08-01/BlobContainersLease_Acquire.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_lease_break.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_lease_break.py index c757b1e4e64c..2094044bb560 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_lease_break.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_lease_break.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.lease( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersLease_Break.json +# x-ms-original-file: 2025-08-01/BlobContainersLease_Break.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_list.py index 2d4e94c2f13f..db85de282ba8 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersList.json +# x-ms-original-file: 2025-08-01/BlobContainersList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_lock_immutability_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_lock_immutability_policy.py index 44bc69427ad9..bc9caff58b6b 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_lock_immutability_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_lock_immutability_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,18 +27,19 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.lock_immutability_policy( resource_group_name="res2702", account_name="sto5009", container_name="container1631", - if_match="8d59f825b721dd3", + etag="str", + match_condition=~azure.core.MatchConditions, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersLockImmutabilityPolicy.json +# x-ms-original-file: 2025-08-01/BlobContainersLockImmutabilityPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_patch.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_patch.py index 0223c1b133df..6b1fee2322fa 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_patch.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_patch.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.update( @@ -40,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersPatch.json +# x-ms-original-file: 2025-08-01/BlobContainersPatch.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put.py index 2b5eace18d79..f1609bb9f688 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.create( @@ -36,71 +35,83 @@ def main(): account_name="sto328", container_name="container6185", blob_container={ - "defaultEncryptionScope": "str", - "deleted": bool, - "deletedTime": "2020-02-20 00:00:00", - "denyEncryptionScopeOverride": bool, - "enableNfsV3AllSquash": bool, - "enableNfsV3RootSquash": bool, "etag": "str", - "hasImmutabilityPolicy": bool, - "hasLegalHold": bool, "id": "str", - "immutabilityPolicy": { - "allowProtectedAppendWrites": bool, - "allowProtectedAppendWritesAll": bool, - "etag": "str", - "immutabilityPeriodSinceCreationInDays": 0, - "state": "str", - "updateHistory": [ - { + "name": "str", + "properties": { + "defaultEncryptionScope": "str", + "deleted": bool, + "deletedTime": "2020-02-20 00:00:00", + "denyEncryptionScopeOverride": bool, + "enableNfsV3AllSquash": bool, + "enableNfsV3RootSquash": bool, + "hasImmutabilityPolicy": bool, + "hasLegalHold": bool, + "immutabilityPolicy": { + "etag": "str", + "properties": { "allowProtectedAppendWrites": bool, "allowProtectedAppendWritesAll": bool, "immutabilityPeriodSinceCreationInDays": 0, - "objectIdentifier": "str", - "tenantId": "str", - "timestamp": "2020-02-20 00:00:00", - "update": "str", - "upn": "str", - } - ], - }, - "immutableStorageWithVersioning": { - "enabled": bool, - "migrationState": "str", - "timeStamp": "2020-02-20 00:00:00", - }, - "lastModifiedTime": "2020-02-20 00:00:00", - "leaseDuration": "str", - "leaseState": "str", - "leaseStatus": "str", - "legalHold": { - "hasLegalHold": bool, - "protectedAppendWritesHistory": { - "allowProtectedAppendWritesAll": bool, - "timestamp": "2020-02-20 00:00:00", + "state": "str", + }, + "updateHistory": [ + { + "allowProtectedAppendWrites": bool, + 
"allowProtectedAppendWritesAll": bool, + "immutabilityPeriodSinceCreationInDays": 0, + "objectIdentifier": "str", + "tenantId": "str", + "timestamp": "2020-02-20 00:00:00", + "update": "str", + "upn": "str", + } + ], }, - "tags": [ - { - "objectIdentifier": "str", - "tag": "str", - "tenantId": "str", + "immutableStorageWithVersioning": { + "enabled": bool, + "migrationState": "str", + "timeStamp": "2020-02-20 00:00:00", + }, + "lastModifiedTime": "2020-02-20 00:00:00", + "leaseDuration": "str", + "leaseState": "str", + "leaseStatus": "str", + "legalHold": { + "hasLegalHold": bool, + "protectedAppendWritesHistory": { + "allowProtectedAppendWritesAll": bool, "timestamp": "2020-02-20 00:00:00", - "upn": "str", - } - ], + }, + "tags": [ + { + "objectIdentifier": "str", + "tag": "str", + "tenantId": "str", + "timestamp": "2020-02-20 00:00:00", + "upn": "str", + } + ], + }, + "metadata": {"str": "str"}, + "publicAccess": "str", + "remainingRetentionDays": 0, + "version": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", }, - "metadata": {"str": "str"}, - "name": "str", - "publicAccess": "str", - "remainingRetentionDays": 0, "type": "str", - "version": "str", }, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersPut.json +# x-ms-original-file: 2025-08-01/BlobContainersPut.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_default_encryption_scope.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_default_encryption_scope.py index 73e7317b899f..4e6fa51adeb8 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_default_encryption_scope.py +++ 
b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_default_encryption_scope.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.create( @@ -42,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersPutDefaultEncryptionScope.json +# x-ms-original-file: 2025-08-01/BlobContainersPutDefaultEncryptionScope.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_immutability_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_immutability_policy.py index 88f64ce352e6..951b64cb9ec2 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_immutability_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_immutability_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. 
-# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.create_or_update_immutability_policy( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersPutImmutabilityPolicy.json +# x-ms-original-file: 2025-08-01/BlobContainersPutImmutabilityPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_immutability_policy_allow_protected_append_writes_all.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_immutability_policy_allow_protected_append_writes_all.py index cce5b226a34f..286d28a356c9 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_immutability_policy_allow_protected_append_writes_all.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_immutability_policy_allow_protected_append_writes_all.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.create_or_update_immutability_policy( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersPutImmutabilityPolicyAllowProtectedAppendWritesAll.json +# x-ms-original-file: 2025-08-01/BlobContainersPutImmutabilityPolicyAllowProtectedAppendWritesAll.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_object_level_worm.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_object_level_worm.py index e29fc1132415..b41fe3d23f02 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_object_level_worm.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_object_level_worm.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.create( @@ -40,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersPutObjectLevelWorm.json +# x-ms-original-file: 2025-08-01/BlobContainersPutObjectLevelWorm.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_set_legal_hold.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_set_legal_hold.py index 1390451e684f..8ad9019bca8b 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_set_legal_hold.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_set_legal_hold.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,18 +27,18 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.set_legal_hold( resource_group_name="res4303", account_name="sto7280", container_name="container8723", - legal_hold={"tags": ["str"], "allowProtectedAppendWritesAll": bool, "hasLegalHold": bool}, + legal_hold={"tags": ["tag1", "tag2", "tag3"]}, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersSetLegalHold.json +# x-ms-original-file: 2025-08-01/BlobContainersSetLegalHold.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_set_legal_hold_allow_protected_append_writes_all.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_set_legal_hold_allow_protected_append_writes_all.py index ce459478ee6f..86f904008eae 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_set_legal_hold_allow_protected_append_writes_all.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_set_legal_hold_allow_protected_append_writes_all.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,18 +27,18 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.set_legal_hold( resource_group_name="res4303", account_name="sto7280", container_name="container8723", - legal_hold={"tags": ["str"], "allowProtectedAppendWritesAll": bool, "hasLegalHold": bool}, + legal_hold={"allowProtectedAppendWritesAll": True, "tags": ["tag1", "tag2", "tag3"]}, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobContainersSetLegalHoldAllowProtectedAppendWritesAll.json +# x-ms-original-file: 2025-08-01/BlobContainersSetLegalHoldAllowProtectedAppendWritesAll.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_ranges_restore.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_ranges_restore.py index 0df92f915e05..e8874db869bd 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_ranges_restore.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_ranges_restore.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_restore_blob_ranges( @@ -45,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobRangesRestore.json +# x-ms-original-file: 2025-08-01/BlobRangesRestore.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_get.py index f949a1271a1a..5ec493874b72 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_get.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_get.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_services.get_service_properties( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobServicesGet.json +# x-ms-original-file: 2025-08-01/BlobServicesGet.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_list.py index bd082c19bf7a..a2d46b282938 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_services.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobServicesList.json +# x-ms-original-file: 2025-08-01/BlobServicesList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_put.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_put.py index 8fac51cc0917..b17bf8ea860a 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_put.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_put.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_services.set_service_properties( @@ -65,12 +64,17 @@ def main(): "defaultServiceVersion": "2017-07-29", "deleteRetentionPolicy": {"days": 300, "enabled": True}, "isVersioningEnabled": True, + "staticWebsite": { + "enabled": True, + "errorDocument404Path": "site/errors/not-found.html", + "indexDocument": "home.html", + }, } }, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobServicesPut.json +# x-ms-original-file: 2025-08-01/BlobServicesPut.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_put_allow_permanent_delete.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_put_allow_permanent_delete.py index adce95b1700e..1f52548763f8 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_put_allow_permanent_delete.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_put_allow_permanent_delete.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_services.set_service_properties( @@ -44,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobServicesPutAllowPermanentDelete.json +# x-ms-original-file: 2025-08-01/BlobServicesPutAllowPermanentDelete.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_put_last_access_time_based_tracking.py b/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_put_last_access_time_based_tracking.py index 0b7b9547eec0..cbe34f984cab 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_put_last_access_time_based_tracking.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/blob_services_put_last_access_time_based_tracking.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_services.set_service_properties( @@ -48,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/BlobServicesPutLastAccessTimeBasedTracking.json +# x-ms-original-file: 2025-08-01/BlobServicesPutLastAccessTimeBasedTracking.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/delete_storage_task_assignment.py b/sdk/storage/azure-mgmt-storage/generated_samples/delete_storage_task_assignment.py similarity index 79% rename from sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/delete_storage_task_assignment.py rename to sdk/storage/azure-mgmt-storage/generated_samples/delete_storage_task_assignment.py index 36e0b5215a64..c83c9d08b71d 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/delete_storage_task_assignment.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/delete_storage_task_assignment.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="1f31ba14-ce16-4281-b9b4-3e78da6e1616", + subscription_id="SUBSCRIPTION_ID", ) client.storage_task_assignments.begin_delete( @@ -38,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/storageTaskAssignmentsCrud/DeleteStorageTaskAssignment.json +# x-ms-original-file: 2025-08-01/storageTaskAssignmentsCrud/DeleteStorageTaskAssignment.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/deleted_account_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/deleted_account_get.py index 7331d3789f98..02be1caefbac 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/deleted_account_get.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/deleted_account_get.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.deleted_accounts.get( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/DeletedAccountGet.json +# x-ms-original-file: 2025-08-01/DeletedAccountGet.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/deleted_account_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/deleted_account_list.py index ec5ec2408e4b..11418b1e1034 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/deleted_account_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/deleted_account_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.deleted_accounts.list() @@ -36,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/DeletedAccountList.json +# x-ms-original-file: 2025-08-01/DeletedAccountList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/deleted_blob_containers_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/deleted_blob_containers_list.py index 6e5ec5c1afa5..47db22b87f7d 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/deleted_blob_containers_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/deleted_blob_containers_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_containers.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/DeletedBlobContainersList.json +# x-ms-original-file: 2025-08-01/DeletedBlobContainersList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/deleted_file_shares_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/deleted_file_shares_list.py index 53d49b75be20..ba21be133ad0 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/deleted_file_shares_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/deleted_file_shares_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/DeletedFileSharesList.json +# x-ms-original-file: 2025-08-01/DeletedFileSharesList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_get.py index 519a03d5f889..7b218d433d0d 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_get.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_get.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_services.get_service_properties( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileServicesGet.json +# x-ms-original-file: 2025-08-01/FileServicesGet.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_get_usage.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_get_usage.py index b8696a2205b9..295f5f54b589 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_get_usage.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_get_usage.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="00000000-1111-2222-3333-444444444444", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_services.get_service_usage( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileServicesGetUsage.json +# x-ms-original-file: 2025-08-01/FileServicesGetUsage.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_list.py index 5ccf6a76c920..5dabd6b0788e 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_services.list( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileServicesList.json +# x-ms-original-file: 2025-08-01/FileServicesList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_list_usages.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_list_usages.py index 622af3ffae0f..1698fdc3509e 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_list_usages.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_list_usages.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="00000000-1111-2222-3333-444444444444", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_services.list_service_usages( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileServicesListUsages.json +# x-ms-original-file: 2025-08-01/FileServicesListUsages.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put.py index 32aa73d76c1c..5e0ff36b019f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_services.set_service_properties( @@ -67,6 +66,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileServicesPut.json +# x-ms-original-file: 2025-08-01/FileServicesPut.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put_enable_secure_smb_features.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put_enable_secure_smb_features.py index 9253aee5ff0a..246c05bd074f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put_enable_secure_smb_features.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put_enable_secure_smb_features.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_services.set_service_properties( @@ -50,6 +49,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileServicesPut_EnableSecureSmbFeatures.json +# x-ms-original-file: 2025-08-01/FileServicesPut_EnableSecureSmbFeatures.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put_enable_smb_multichannel.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put_enable_smb_multichannel.py index e45906469e1e..fa350fc7f9cc 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put_enable_smb_multichannel.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put_enable_smb_multichannel.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_services.set_service_properties( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileServicesPut_EnableSMBMultichannel.json +# x-ms-original-file: 2025-08-01/FileServicesPut_EnableSMBMultichannel.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put_encryption_in_transit_required.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put_encryption_in_transit_required.py index 11e587f22d49..3783ca6d4745 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put_encryption_in_transit_required.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_services_put_encryption_in_transit_required.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_services.set_service_properties( @@ -46,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileServicesPut_EncryptionInTransitRequired.json +# x-ms-original-file: 2025-08-01/FileServicesPut_EncryptionInTransitRequired.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_share_acls_patch.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_share_acls_patch.py index 8741649142b8..f935fdea4dd4 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_share_acls_patch.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_share_acls_patch.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.update( @@ -53,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileShareAclsPatch.json +# x-ms-original-file: 2025-08-01/FileShareAclsPatch.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_share_snapshots_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_share_snapshots_list.py index 6f41403304ad..864ce03b9cf4 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_share_snapshots_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_share_snapshots_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileShareSnapshotsList.json +# x-ms-original-file: 2025-08-01/FileShareSnapshotsList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_delete.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_delete.py index 339d7b3e7aa0..77b050bf6b5a 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_delete.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_delete.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.file_shares.delete( @@ -38,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesDelete.json +# x-ms-original-file: 2025-08-01/FileSharesDelete.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get.py index d81cb5ac3e49..efcc74676e2e 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesGet.json +# x-ms-original-file: 2025-08-01/FileSharesGet.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get_paid_bursting.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get_paid_bursting.py index 517c3c11c1e7..a98dcd2f4be1 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get_paid_bursting.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get_paid_bursting.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesGet_PaidBursting.json +# x-ms-original-file: 2025-08-01/FileSharesGet_PaidBursting.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get_provisioned_v2.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get_provisioned_v2.py index e85818ffb58c..87bb2f070bda 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get_provisioned_v2.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get_provisioned_v2.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesGet_ProvisionedV2.json +# x-ms-original-file: 2025-08-01/FileSharesGet_ProvisionedV2.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get_stats.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get_stats.py index 4db701ba4a5a..35b278666a64 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get_stats.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_get_stats.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesGet_Stats.json +# x-ms-original-file: 2025-08-01/FileSharesGet_Stats.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_lease_acquire.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_lease_acquire.py index 0d50981d3f34..1425db92b8ce 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_lease_acquire.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_lease_acquire.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.lease( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesLease_Acquire.json +# x-ms-original-file: 2025-08-01/FileSharesLease_Acquire.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_lease_break.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_lease_break.py index 475e822db70d..b2e69e10b60c 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_lease_break.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_lease_break.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.lease( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesLease_Break.json +# x-ms-original-file: 2025-08-01/FileSharesLease_Break.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_list.py index 98a90d515a59..556609ec5c9f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesList.json +# x-ms-original-file: 2025-08-01/FileSharesList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_list_paid_bursting.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_list_paid_bursting.py index 745aac56a817..834514b2d725 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_list_paid_bursting.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_list_paid_bursting.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesList_PaidBursting.json +# x-ms-original-file: 2025-08-01/FileSharesList_PaidBursting.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_list_provisioned_v2.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_list_provisioned_v2.py index c3328512971c..5dfb46d9a9db 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_list_provisioned_v2.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_list_provisioned_v2.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesList_ProvisionedV2.json +# x-ms-original-file: 2025-08-01/FileSharesList_ProvisionedV2.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_patch.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_patch.py index 93fc8587a06d..38fb2d4daa32 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_patch.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_patch.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.update( @@ -40,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesPatch.json +# x-ms-original-file: 2025-08-01/FileSharesPatch.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_patch_paid_bursting.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_patch_paid_bursting.py index 4ba1e49dd5bd..078d7a681b14 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_patch_paid_bursting.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_patch_paid_bursting.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.update( @@ -48,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesPatch_PaidBursting.json +# x-ms-original-file: 2025-08-01/FileSharesPatch_PaidBursting.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_patch_provisioned_v2.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_patch_provisioned_v2.py index 2a370ff7444f..d17edb27df3b 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_patch_provisioned_v2.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_patch_provisioned_v2.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.update( @@ -40,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesPatch_ProvisionedV2.json +# x-ms-original-file: 2025-08-01/FileSharesPatch_ProvisionedV2.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put.py index ce4e5e1e30f6..15974f41b759 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.create( @@ -36,54 +35,64 @@ def main(): account_name="sto328", share_name="share6185", file_share={ - "accessTier": "str", - "accessTierChangeTime": "2020-02-20 00:00:00", - "accessTierStatus": "str", - "deleted": bool, - "deletedTime": "2020-02-20 00:00:00", - "enabledProtocols": "str", "etag": "str", - "fileSharePaidBursting": { - "paidBurstingEnabled": bool, - "paidBurstingMaxBandwidthMibps": 0, - "paidBurstingMaxIops": 0, - }, "id": "str", - "includedBurstIops": 0, - "lastModifiedTime": "2020-02-20 00:00:00", - "leaseDuration": "str", - "leaseState": "str", - "leaseStatus": "str", - "maxBurstCreditsForIops": 0, - "metadata": {"str": "str"}, "name": "str", - "nextAllowedProvisionedBandwidthDowngradeTime": "2020-02-20 00:00:00", - "nextAllowedProvisionedIopsDowngradeTime": "2020-02-20 00:00:00", - "nextAllowedQuotaDowngradeTime": "2020-02-20 00:00:00", - "provisionedBandwidthMibps": 0, - "provisionedIops": 0, - "remainingRetentionDays": 0, - "rootSquash": "str", - "shareQuota": 0, - "shareUsageBytes": 0, - "signedIdentifiers": [ - { - "accessPolicy": { - "expiryTime": "2020-02-20 00:00:00", - "permission": "str", - "startTime": "2020-02-20 00:00:00", - }, - "id": "str", - } - ], - "snapshotTime": "2020-02-20 00:00:00", + "properties": { + "accessTier": "str", + "accessTierChangeTime": "2020-02-20 00:00:00", + "accessTierStatus": "str", + "deleted": bool, + "deletedTime": "2020-02-20 00:00:00", + "enabledProtocols": "str", + "fileSharePaidBursting": { + "paidBurstingEnabled": bool, + "paidBurstingMaxBandwidthMibps": 0, + "paidBurstingMaxIops": 0, + }, + "includedBurstIops": 0, + "lastModifiedTime": "2020-02-20 00:00:00", + "leaseDuration": "str", + "leaseState": "str", + 
"leaseStatus": "str", + "maxBurstCreditsForIops": 0, + "metadata": {"str": "str"}, + "nextAllowedProvisionedBandwidthDowngradeTime": "2020-02-20 00:00:00", + "nextAllowedProvisionedIopsDowngradeTime": "2020-02-20 00:00:00", + "nextAllowedQuotaDowngradeTime": "2020-02-20 00:00:00", + "provisionedBandwidthMibps": 0, + "provisionedIops": 0, + "remainingRetentionDays": 0, + "rootSquash": "str", + "shareQuota": 0, + "shareUsageBytes": 0, + "signedIdentifiers": [ + { + "accessPolicy": { + "expiryTime": "2020-02-20 00:00:00", + "permission": "str", + "startTime": "2020-02-20 00:00:00", + }, + "id": "str", + } + ], + "snapshotTime": "2020-02-20 00:00:00", + "version": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", - "version": "str", }, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesPut.json +# x-ms-original-file: 2025-08-01/FileSharesPut.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_access_tier.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_access_tier.py index eccffa0f51a9..b61ff0c3e52d 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_access_tier.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_access_tier.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.create( @@ -40,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesPut_AccessTier.json +# x-ms-original-file: 2025-08-01/FileSharesPut_AccessTier.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_nfs.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_nfs.py index a7f565bd00e5..2f5d633f0017 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_nfs.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_nfs.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.create( @@ -40,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesPut_NFS.json +# x-ms-original-file: 2025-08-01/FileSharesPut_NFS.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_paid_bursting.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_paid_bursting.py index 4c22258d1344..04688443eb69 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_paid_bursting.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_paid_bursting.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.create( @@ -48,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesPut_PaidBursting.json +# x-ms-original-file: 2025-08-01/FileSharesPut_PaidBursting.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_provisioned_v2.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_provisioned_v2.py index b0cfd3e45451..24f9efdab394 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_provisioned_v2.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_put_provisioned_v2.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.file_shares.create( @@ -40,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesPut_ProvisionedV2.json +# x-ms-original-file: 2025-08-01/FileSharesPut_ProvisionedV2.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_restore.py b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_restore.py index 1b56c32d0ab0..1469ce5c897d 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_restore.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/file_shares_restore.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.file_shares.restore( @@ -39,6 +38,6 @@ def main(): ) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/FileSharesRestore.json +# x-ms-original-file: 2025-08-01/FileSharesRestore.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/get_storage_task_assignment.py b/sdk/storage/azure-mgmt-storage/generated_samples/get_storage_task_assignment.py similarity index 80% rename from sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/get_storage_task_assignment.py rename to sdk/storage/azure-mgmt-storage/generated_samples/get_storage_task_assignment.py index 6bbde447e43a..5ffba8d0288f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/get_storage_task_assignment.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/get_storage_task_assignment.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="1f31ba14-ce16-4281-b9b4-3e78da6e1616", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_task_assignments.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/storageTaskAssignmentsCrud/GetStorageTaskAssignment.json +# x-ms-original-file: 2025-08-01/storageTaskAssignmentsCrud/GetStorageTaskAssignment.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_list/list_storage_task_assignment_instances_report_summary.py b/sdk/storage/azure-mgmt-storage/generated_samples/list_storage_task_assignment_instances_report_summary.py similarity index 79% rename from sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_list/list_storage_task_assignment_instances_report_summary.py rename to sdk/storage/azure-mgmt-storage/generated_samples/list_storage_task_assignment_instances_report_summary.py index 2452b2c57318..2c3e8bf5df69 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_list/list_storage_task_assignment_instances_report_summary.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/list_storage_task_assignment_instances_report_summary.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="1f31ba14-ce16-4281-b9b4-3e78da6e1616", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_task_assignment_instances_report.list( @@ -40,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/storageTaskAssignmentsList/ListStorageTaskAssignmentInstancesReportSummary.json +# x-ms-original-file: 2025-08-01/storageTaskAssignmentsList/ListStorageTaskAssignmentInstancesReportSummary.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_list/list_storage_task_assignments_for_account.py b/sdk/storage/azure-mgmt-storage/generated_samples/list_storage_task_assignments_for_account.py similarity index 79% rename from sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_list/list_storage_task_assignments_for_account.py rename to sdk/storage/azure-mgmt-storage/generated_samples/list_storage_task_assignments_for_account.py index b3837a114d24..5f8b743496ea 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_list/list_storage_task_assignments_for_account.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/list_storage_task_assignments_for_account.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="1f31ba14-ce16-4281-b9b4-3e78da6e1616", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_task_assignments.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/storageTaskAssignmentsList/ListStorageTaskAssignmentsForAccount.json +# x-ms-original-file: 2025-08-01/storageTaskAssignmentsList/ListStorageTaskAssignmentsForAccount.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_list/list_storage_task_assignments_instances_report_summary.py b/sdk/storage/azure-mgmt-storage/generated_samples/list_storage_task_assignments_instances_report_summary.py similarity index 79% rename from sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_list/list_storage_task_assignments_instances_report_summary.py rename to sdk/storage/azure-mgmt-storage/generated_samples/list_storage_task_assignments_instances_report_summary.py index 1b9378781146..3ff57dfac74e 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_list/list_storage_task_assignments_instances_report_summary.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/list_storage_task_assignments_instances_report_summary.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="1f31ba14-ce16-4281-b9b4-3e78da6e1616", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_task_assignments_instances_report.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/storageTaskAssignmentsList/ListStorageTaskAssignmentsInstancesReportSummary.json +# x-ms-original-file: 2025-08-01/storageTaskAssignmentsList/ListStorageTaskAssignmentsInstancesReportSummary.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_create.py b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_create.py index 119b5750f239..17ee543f5bf1 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_create.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_create.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.local_users.create_or_update( @@ -52,6 +51,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/LocalUserCreate.json +# x-ms-original-file: 2025-08-01/LocalUserCreate.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_create_nf_sv3_enabled.py b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_create_nf_sv3_enabled.py index b42d8daff352..5ade5dba00b8 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_create_nf_sv3_enabled.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_create_nf_sv3_enabled.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.local_users.create_or_update( @@ -40,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/LocalUserCreateNFSv3Enabled.json +# x-ms-original-file: 2025-08-01/LocalUserCreateNFSv3Enabled.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_delete.py b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_delete.py index e8ee79465a2e..42f43491db61 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_delete.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_delete.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.local_users.delete( @@ -38,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/LocalUserDelete.json +# x-ms-original-file: 2025-08-01/LocalUserDelete.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_get.py index 34766426cf37..78969aa3a9cb 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_get.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_get.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.local_users.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/LocalUserGet.json +# x-ms-original-file: 2025-08-01/LocalUserGet.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_list_keys.py b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_list_keys.py index a01e823d3233..f96e28a8b7f8 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_list_keys.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_list_keys.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.local_users.list_keys( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/LocalUserListKeys.json +# x-ms-original-file: 2025-08-01/LocalUserListKeys.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_regenerate_password.py b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_regenerate_password.py index d3dcf5541f88..7509b59b136f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_regenerate_password.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_regenerate_password.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.local_users.regenerate_password( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/LocalUserRegeneratePassword.json +# x-ms-original-file: 2025-08-01/LocalUserRegeneratePassword.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_update.py b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_update.py index cb23ecb751da..e6dbb737d68a 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/local_user_update.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/local_user_update.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.local_users.create_or_update( @@ -51,6 +50,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/LocalUserUpdate.json +# x-ms-original-file: 2025-08-01/LocalUserUpdate.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/local_users_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/local_users_list.py index 52bf4fcf1654..a8158e390b09 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/local_users_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/local_users_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.local_users.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/LocalUsersList.json +# x-ms-original-file: 2025-08-01/LocalUsersList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/local_users_list_nf_sv3_enabled.py b/sdk/storage/azure-mgmt-storage/generated_samples/local_users_list_nf_sv3_enabled.py index 19a519aafc46..5acabe35688b 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/local_users_list_nf_sv3_enabled.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/local_users_list_nf_sv3_enabled.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.local_users.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/LocalUsersListNFSv3Enabled.json +# x-ms-original-file: 2025-08-01/LocalUsersListNFSv3Enabled.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/network_security_perimeter_configuration_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/network_security_perimeter_configuration_get.py index 482abbe15a1e..bdc1c6cb45e0 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/network_security_perimeter_configuration_get.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/network_security_perimeter_configuration_get.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="00000000-1111-2222-3333-444444444444", + subscription_id="SUBSCRIPTION_ID", ) response = client.network_security_perimeter_configurations.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/NetworkSecurityPerimeterConfigurationGet.json +# x-ms-original-file: 2025-08-01/NetworkSecurityPerimeterConfigurationGet.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/network_security_perimeter_configuration_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/network_security_perimeter_configuration_list.py index 64aea7029b19..2156af2f6e75 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/network_security_perimeter_configuration_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/network_security_perimeter_configuration_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="00000000-1111-2222-3333-444444444444", + subscription_id="SUBSCRIPTION_ID", ) response = client.network_security_perimeter_configurations.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/NetworkSecurityPerimeterConfigurationList.json +# x-ms-original-file: 2025-08-01/NetworkSecurityPerimeterConfigurationList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/network_security_perimeter_configuration_reconcile.py b/sdk/storage/azure-mgmt-storage/generated_samples/network_security_perimeter_configuration_reconcile.py index a48b81ef816e..217ec38c1d93 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/network_security_perimeter_configuration_reconcile.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/network_security_perimeter_configuration_reconcile.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="00000000-1111-2222-3333-444444444444", + subscription_id="SUBSCRIPTION_ID", ) client.network_security_perimeter_configurations.begin_reconcile( @@ -38,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/NetworkSecurityPerimeterConfigurationReconcile.json +# x-ms-original-file: 2025-08-01/NetworkSecurityPerimeterConfigurationReconcile.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/nfs_v3_account_create.py b/sdk/storage/azure-mgmt-storage/generated_samples/nfs_v3_account_create.py index 511a671e7a16..632ce9b03acc 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/nfs_v3_account_create.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/nfs_v3_account_create.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -59,6 +59,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/NfsV3AccountCreate.json +# x-ms-original-file: 2025-08-01/NfsV3AccountCreate.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/object_level_worm_container_migration.py b/sdk/storage/azure-mgmt-storage/generated_samples/object_level_worm_container_migration.py index 9591cdd51132..f85170e922a3 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/object_level_worm_container_migration.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/object_level_worm_container_migration.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.blob_containers.begin_object_level_worm( @@ -38,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/ObjectLevelWormContainerMigration.json +# x-ms-original-file: 2025-08-01/ObjectLevelWormContainerMigration.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/operations_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/operations_list.py index d91301b82d93..e9fd5ebc5680 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/operations_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/operations_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -36,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/OperationsList.json +# x-ms-original-file: 2025-08-01/OperationsList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/patch_storage_task_assignment.py b/sdk/storage/azure-mgmt-storage/generated_samples/patch_storage_task_assignment.py similarity index 84% rename from sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/patch_storage_task_assignment.py rename to sdk/storage/azure-mgmt-storage/generated_samples/patch_storage_task_assignment.py index 181c203c1c13..d2d5b8ef054f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/patch_storage_task_assignment.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/patch_storage_task_assignment.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="1f31ba14-ce16-4281-b9b4-3e78da6e1616", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_task_assignments.begin_update( @@ -50,6 +49,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/storageTaskAssignmentsCrud/PatchStorageTaskAssignment.json +# x-ms-original-file: 2025-08-01/storageTaskAssignmentsCrud/PatchStorageTaskAssignment.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/put_storage_task_assignment.py b/sdk/storage/azure-mgmt-storage/generated_samples/put_storage_task_assignment.py similarity index 87% rename from sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/put_storage_task_assignment.py rename to sdk/storage/azure-mgmt-storage/generated_samples/put_storage_task_assignment.py index 603933e7d694..98aff7190db4 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/put_storage_task_assignment.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/put_storage_task_assignment.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="1f31ba14-ce16-4281-b9b4-3e78da6e1616", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_task_assignments.begin_create( @@ -51,6 +51,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/storageTaskAssignmentsCrud/PutStorageTaskAssignment.json +# x-ms-original-file: 2025-08-01/storageTaskAssignmentsCrud/PutStorageTaskAssignment.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/put_storage_task_assignment_mock_run.py b/sdk/storage/azure-mgmt-storage/generated_samples/put_storage_task_assignment_mock_run.py new file mode 100644 index 000000000000..d0c53454f53b --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/put_storage_task_assignment_mock_run.py @@ -0,0 +1,56 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python put_storage_task_assignment_mock_run.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_task_assignments.begin_create( + resource_group_name="res4228", + account_name="sto4445", + storage_task_assignment_name="myassignment1", + parameters={ + "properties": { + "description": "My Storage task assignment for testing", + "enabled": True, + "executionContext": { + "target": {"excludePrefix": [], "prefix": []}, + "trigger": {"parameters": {"startOn": "2023-01-01T00:00:00.1234567Z"}, "type": "MockRun"}, + }, + "report": {"prefix": "reports"}, + "taskId": "/subscriptions/1f31ba14-ce16-4281-b9b4-3e78da6e1616/resourceGroups/res4228/providers/Microsoft.StorageActions/storageTasks/myStorageTask", + } + }, + ).result() + print(response) + + +# x-ms-original-file: 2025-08-01/storageTaskAssignmentsCrud/PutStorageTaskAssignmentMockRun.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/put_storage_task_assignment_required_properties.py b/sdk/storage/azure-mgmt-storage/generated_samples/put_storage_task_assignment_required_properties.py similarity index 86% rename from 
sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/put_storage_task_assignment_required_properties.py rename to sdk/storage/azure-mgmt-storage/generated_samples/put_storage_task_assignment_required_properties.py index 4ef4afe66b89..c4071c27fa84 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_task_assignments_crud/put_storage_task_assignment_required_properties.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/put_storage_task_assignment_required_properties.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="1f31ba14-ce16-4281-b9b4-3e78da6e1616", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_task_assignments.begin_create( @@ -50,6 +50,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/storageTaskAssignmentsCrud/PutStorageTaskAssignmentRequiredProperties.json +# x-ms-original-file: 2025-08-01/storageTaskAssignmentsCrud/PutStorageTaskAssignmentRequiredProperties.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_delete.py b/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_delete.py index 552536ecc0d1..96b39b8b7f09 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_delete.py +++ 
b/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_delete.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.queue.delete( @@ -38,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/QueueOperationDelete.json +# x-ms-original-file: 2025-08-01/QueueOperationDelete.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_get.py index 0dda36313e9b..a1fe18877151 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_get.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_get.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.queue.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/QueueOperationGet.json +# x-ms-original-file: 2025-08-01/QueueOperationGet.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_list.py index 27a819a6000a..de0775a3e5c8 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.queue.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/QueueOperationList.json +# x-ms-original-file: 2025-08-01/QueueOperationList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_patch.py b/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_patch.py index f619094b9092..36d51d422cbe 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_patch.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_patch.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,18 +27,31 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.queue.update( resource_group_name="res3376", account_name="sto328", queue_name="queue6185", - queue={"approximateMessageCount": 0, "id": "str", "metadata": {"str": "str"}, "name": "str", "type": "str"}, + queue={ + "id": "str", + "name": "str", + "properties": {"approximateMessageCount": 0, "metadata": {"str": "str"}}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/QueueOperationPatch.json +# x-ms-original-file: 2025-08-01/QueueOperationPatch.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_put.py b/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_put.py index c43f656ff66e..2c21be0280ca 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_put.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_put.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,18 +27,31 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.queue.create( resource_group_name="res3376", account_name="sto328", queue_name="queue6185", - queue={"approximateMessageCount": 0, "id": "str", "metadata": {"str": "str"}, "name": "str", "type": "str"}, + queue={ + "id": "str", + "name": "str", + "properties": {"approximateMessageCount": 0, "metadata": {"str": "str"}}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/QueueOperationPut.json +# x-ms-original-file: 2025-08-01/QueueOperationPut.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_put_with_metadata.py b/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_put_with_metadata.py index e43dfb22aa18..dcab7e751e99 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_put_with_metadata.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_put_with_metadata.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.queue.create( @@ -40,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/QueueOperationPutWithMetadata.json +# x-ms-original-file: 2025-08-01/QueueOperationPutWithMetadata.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/queue_services_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/queue_services_get.py index edc3eb4bb4eb..64a2a8dd04af 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/queue_services_get.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/queue_services_get.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.queue_services.get_service_properties( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/QueueServicesGet.json +# x-ms-original-file: 2025-08-01/QueueServicesGet.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/queue_services_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/queue_services_list.py index c03fa8fe3412..1ab217c026eb 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/queue_services_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/queue_services_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.queue_services.list( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/QueueServicesList.json +# x-ms-original-file: 2025-08-01/QueueServicesList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/queue_services_put.py b/sdk/storage/azure-mgmt-storage/generated_samples/queue_services_put.py index 21132a138921..ebab62c08b47 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/queue_services_put.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/queue_services_put.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.queue_services.set_service_properties( @@ -67,6 +66,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/QueueServicesPut.json +# x-ms-original-file: 2025-08-01/QueueServicesPut.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/sku_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/sku_list.py index a1dfa9c13e7a..0dcaa8a032c1 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/sku_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/sku_list.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -27,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.skus.list() @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/SKUList.json +# x-ms-original-file: 2025-08-01/SKUList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/sku_list_with_location_info.py b/sdk/storage/azure-mgmt-storage/generated_samples/sku_list_with_location_info.py index 660d53cd31fd..e47cfdbd56b3 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/sku_list_with_location_info.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/sku_list_with_location_info.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.skus.list() @@ -36,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/SKUListWithLocationInfo.json +# x-ms-original-file: 2025-08-01/SKUListWithLocationInfo.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/stop_storage_task_assignment.py b/sdk/storage/azure-mgmt-storage/generated_samples/stop_storage_task_assignment.py new file mode 100644 index 000000000000..e3d7135b1ecf --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/stop_storage_task_assignment.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python stop_storage_task_assignment.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.storage_task_assignments.begin_stop_assignment( + resource_group_name="res4228", + account_name="sto4445", + storage_task_assignment_name="myassignment1", + ).result() + + +# x-ms-original-file: 2025-08-01/storageTaskAssignmentsCrud/StopStorageTaskAssignment.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_abort_hierarchical_namespace_migration.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_abort_hierarchical_namespace_migration.py index d1af85c0e261..7aa208eb4897 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_abort_hierarchical_namespace_migration.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_abort_hierarchical_namespace_migration.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.storage_accounts.begin_abort_hierarchical_namespace_migration( @@ -37,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountAbortHierarchicalNamespaceMigration.json +# x-ms-original-file: 2025-08-01/StorageAccountAbortHierarchicalNamespaceMigration.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_check_name_availability.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_check_name_availability.py index 43a4972cdb0b..7ae73321c279 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_check_name_availability.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_check_name_availability.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.check_name_availability( @@ -37,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCheckNameAvailability.json +# x-ms-original-file: 2025-08-01/StorageAccountCheckNameAvailability.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create.py index ce4c21e82463..2bb665257f23 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -41,6 +40,12 @@ def main(): "properties": { "allowBlobPublicAccess": False, "allowSharedKeyAccess": True, + "allowSharedKeyAccessForServices": { + "blob": {"enabled": True}, + "file": {"enabled": False}, + "queue": {"enabled": True}, + "table": {"enabled": False}, + }, "defaultToOAuthAuthentication": False, "encryption": { "keySource": "Microsoft.Storage", @@ -69,6 +74,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreate.json +# x-ms-original-file: 2025-08-01/StorageAccountCreate.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_allowed_copy_scope_to_aad.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_allowed_copy_scope_to_aad.py index b12752577692..d31dcce77b19 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_allowed_copy_scope_to_aad.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_allowed_copy_scope_to_aad.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -66,6 +65,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreateAllowedCopyScopeToAAD.json +# x-ms-original-file: 2025-08-01/StorageAccountCreateAllowedCopyScopeToAAD.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_allowed_copy_scope_to_private_link.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_allowed_copy_scope_to_private_link.py index 96f594b8be91..4df4fdfe9afe 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_allowed_copy_scope_to_private_link.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_allowed_copy_scope_to_private_link.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -66,6 +65,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreateAllowedCopyScopeToPrivateLink.json +# x-ms-original-file: 2025-08-01/StorageAccountCreateAllowedCopyScopeToPrivateLink.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_disallow_public_network_access.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_disallow_public_network_access.py index 2b8befcb5ed8..8e250b5acc6f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_disallow_public_network_access.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_disallow_public_network_access.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -67,6 +66,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreateDisallowPublicNetworkAccess.json +# x-ms-original-file: 2025-08-01/StorageAccountCreateDisallowPublicNetworkAccess.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_dns_endpoint_type_to_azure_dns_zone.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_dns_endpoint_type_to_azure_dns_zone.py index d3d188ed400e..3177b72e96d3 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_dns_endpoint_type_to_azure_dns_zone.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_dns_endpoint_type_to_azure_dns_zone.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -69,6 +68,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreateDnsEndpointTypeToAzureDnsZone.json +# x-ms-original-file: 2025-08-01/StorageAccountCreateDnsEndpointTypeToAzureDnsZone.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_dns_endpoint_type_to_standard.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_dns_endpoint_type_to_standard.py index dcb54ea873fb..e75e0ab63481 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_dns_endpoint_type_to_standard.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_dns_endpoint_type_to_standard.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -69,6 +68,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreateDnsEndpointTypeToStandard.json +# x-ms-original-file: 2025-08-01/StorageAccountCreateDnsEndpointTypeToStandard.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_enable_public_network_access.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_enable_public_network_access.py index 471cc30ab7c4..5924a16a1644 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_enable_public_network_access.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_enable_public_network_access.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -67,6 +66,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreateEnablePublicNetworkAccess.json +# x-ms-original-file: 2025-08-01/StorageAccountCreateEnablePublicNetworkAccess.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_object_replication_policy_on_destination.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_object_replication_policy_on_destination.py index 07a066e516e4..7b12ee73dc22 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_object_replication_policy_on_destination.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_object_replication_policy_on_destination.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.object_replication_policies.create_or_update( @@ -48,12 +47,13 @@ def main(): } ], "sourceAccount": "src1122", + "tagsReplication": {"enabled": True}, } }, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreateObjectReplicationPolicyOnDestination.json +# x-ms-original-file: 2025-08-01/StorageAccountCreateObjectReplicationPolicyOnDestination.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_object_replication_policy_on_source.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_object_replication_policy_on_source.py index 72103a22ed32..453d80238635 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_object_replication_policy_on_source.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_object_replication_policy_on_source.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.object_replication_policies.create_or_update( @@ -49,12 +48,13 @@ def main(): } ], "sourceAccount": "src1122", + "tagsReplication": {"enabled": True}, } }, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreateObjectReplicationPolicyOnSource.json +# x-ms-original-file: 2025-08-01/StorageAccountCreateObjectReplicationPolicyOnSource.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_placement.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_placement.py index be692a148767..df226176a1ea 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_placement.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_placement.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -69,6 +68,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreate_placement.json +# x-ms-original-file: 2025-08-01/StorageAccountCreate_placement.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_premium_block_blob_storage.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_premium_block_blob_storage.py index 7602cffce113..d03834175f73 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_premium_block_blob_storage.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_premium_block_blob_storage.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -56,6 +55,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreatePremiumBlockBlobStorage.json +# x-ms-original-file: 2025-08-01/StorageAccountCreatePremiumBlockBlobStorage.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_user_assigned_encryption_identity_with_cmk.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_user_assigned_encryption_identity_with_cmk.py index 082735c1f69d..d9484b72ed09 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_user_assigned_encryption_identity_with_cmk.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_user_assigned_encryption_identity_with_cmk.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -66,6 +66,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreateUserAssignedEncryptionIdentityWithCMK.json +# x-ms-original-file: 2025-08-01/StorageAccountCreateUserAssignedEncryptionIdentityWithCMK.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_user_assigned_identity_with_federated_identity_client_id.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_user_assigned_identity_with_federated_identity_client_id.py index 151fb35f4522..ca1262709366 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_user_assigned_identity_with_federated_identity_client_id.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_user_assigned_identity_with_federated_identity_client_id.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -67,6 +67,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreateUserAssignedIdentityWithFederatedIdentityClientId.json +# x-ms-original-file: 2025-08-01/StorageAccountCreateUserAssignedIdentityWithFederatedIdentityClientId.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_with_data_collaboration_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_with_data_collaboration_policy.py new file mode 100644 index 000000000000..af4b27da9d2a --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_with_data_collaboration_policy.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_account_create_with_data_collaboration_policy.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_accounts.begin_create( + resource_group_name="res9101", + account_name="sto4445", + parameters={ + "kind": "Storage", + "location": "eastus", + "properties": { + "dataCollaborationPolicyProperties": { + "allowCrossTenantDataSharing": False, + "allowStorageConnectors": True, + "allowStorageDataShares": True, + } + }, + "sku": {"name": "Standard_GRS"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2025-08-01/StorageAccountCreateWithDataCollaborationPolicy.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_with_immutability_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_with_immutability_policy.py index 63a123334f72..36ed4fad5298 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_with_immutability_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_with_immutability_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # 
-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -54,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreateWithImmutabilityPolicy.json +# x-ms-original-file: 2025-08-01/StorageAccountCreateWithImmutabilityPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_with_smart_access_tier.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_with_smart_access_tier.py new file mode 100644 index 000000000000..95bda3e5eb12 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_with_smart_access_tier.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_account_create_with_smart_access_tier.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_accounts.begin_create( + resource_group_name="res9101", + account_name="sto4445", + parameters={ + "extendedLocation": {"name": "losangeles001", "type": "EdgeZone"}, + "kind": "Storage", + "location": "eastus", + "properties": { + "accessTier": "Smart", + "allowBlobPublicAccess": False, + "allowSharedKeyAccess": True, + "defaultToOAuthAuthentication": False, + "encryption": { + "keySource": "Microsoft.Storage", + "requireInfrastructureEncryption": False, + "services": { + "blob": {"enabled": True, "keyType": "Account"}, + "file": {"enabled": True, "keyType": "Account"}, + }, + }, + "geoPriorityReplicationStatus": {"isBlobEnabled": True}, + "isHnsEnabled": True, + "isSftpEnabled": True, + "keyPolicy": {"keyExpirationPeriodInDays": 20}, + "minimumTlsVersion": "TLS1_2", + "routingPreference": { + "publishInternetEndpoints": True, + "publishMicrosoftEndpoints": True, + "routingChoice": "MicrosoftRouting", + }, + "sasPolicy": {"expirationAction": "Log", "sasExpirationPeriod": "1.15:59:59"}, + }, + "sku": {"name": "Standard_GRS"}, + "tags": {"key1": "value1", "key2": "value2"}, + }, + ).result() + print(response) + + +# 
x-ms-original-file: 2025-08-01/StorageAccountCreateWithSmartAccessTier.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_zones.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_zones.py index 43a777d9cab6..590eedad31c5 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_zones.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_create_zones.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.begin_create( @@ -69,6 +68,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountCreate_zones.json +# x-ms-original-file: 2025-08-01/StorageAccountCreate_zones.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete.py index e43bba82c4d8..7116a2996b2f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.storage_accounts.delete( @@ -37,6 +36,6 @@ def main(): ) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountDelete.json +# x-ms-original-file: 2025-08-01/StorageAccountDelete.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_blob_inventory_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_blob_inventory_policy.py index 0f9ffb08c7ab..46c370e44d42 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_blob_inventory_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_blob_inventory_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.blob_inventory_policies.delete( @@ -38,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountDeleteBlobInventoryPolicy.json +# x-ms-original-file: 2025-08-01/StorageAccountDeleteBlobInventoryPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_management_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_management_policy.py index 863123059f90..f794d067a809 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_management_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_management_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.management_policies.delete( @@ -38,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountDeleteManagementPolicy.json +# x-ms-original-file: 2025-08-01/StorageAccountDeleteManagementPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_object_replication_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_object_replication_policy.py index 125eb59b812c..6d851cf9d6be 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_object_replication_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_object_replication_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.object_replication_policies.delete( @@ -38,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountDeleteObjectReplicationPolicy.json +# x-ms-original-file: 2025-08-01/StorageAccountDeleteObjectReplicationPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_private_endpoint_connection.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_private_endpoint_connection.py index 438c09299e64..36b5fd30dd98 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_private_endpoint_connection.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_delete_private_endpoint_connection.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.private_endpoint_connections.delete( @@ -38,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountDeletePrivateEndpointConnection.json +# x-ms-original-file: 2025-08-01/StorageAccountDeletePrivateEndpointConnection.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_enable_ad.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_enable_ad.py index 452ff97a827e..3e4af25ede09 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_enable_ad.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_enable_ad.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.update( @@ -55,6 +54,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountEnableAD.json +# x-ms-original-file: 2025-08-01/StorageAccountEnableAD.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_enable_cmk.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_enable_cmk.py index d3d522c88bab..6cc16839dccf 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_enable_cmk.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_enable_cmk.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.update( @@ -54,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountEnableCMK.json +# x-ms-original-file: 2025-08-01/StorageAccountEnableCMK.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_enable_smb_oauth.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_enable_smb_oauth.py index ccf90d1f28ae..4faad81cc917 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_enable_smb_oauth.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_enable_smb_oauth.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.update( @@ -46,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountEnableSmbOAuth.json +# x-ms-original-file: 2025-08-01/StorageAccountEnableSmbOAuth.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_encryption_scope_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_encryption_scope_list.py index be351662632b..a6bd879de7ba 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_encryption_scope_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_encryption_scope_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.encryption_scopes.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountEncryptionScopeList.json +# x-ms-original-file: 2025-08-01/StorageAccountEncryptionScopeList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_failover.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_failover.py index 42e38e0cf65c..ce42ca8819dc 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_failover.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_failover.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.storage_accounts.begin_failover( @@ -37,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountFailover.json +# x-ms-original-file: 2025-08-01/StorageAccountFailover.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_failover_planned.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_failover_planned.py index 479dbbab35d8..776c7cfb2591 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_failover_planned.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_failover_planned.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.storage_accounts.begin_failover( @@ -37,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountFailoverPlanned.json +# x-ms-original-file: 2025-08-01/StorageAccountFailoverPlanned.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_async_sku_conversion_status.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_async_sku_conversion_status.py index cb6aedc90cdd..178c8069e64e 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_async_sku_conversion_status.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_async_sku_conversion_status.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.get_properties( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetAsyncSkuConversionStatus.json +# x-ms-original-file: 2025-08-01/StorageAccountGetAsyncSkuConversionStatus.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_blob_inventory_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_blob_inventory_policy.py index fbda98a76dce..c79880391133 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_blob_inventory_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_blob_inventory_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_inventory_policies.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetBlobInventoryPolicy.json +# x-ms-original-file: 2025-08-01/StorageAccountGetBlobInventoryPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_encryption_scope.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_encryption_scope.py index 1a1808e591cc..3aec5fa0d825 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_encryption_scope.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_encryption_scope.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.encryption_scopes.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetEncryptionScope.json +# x-ms-original-file: 2025-08-01/StorageAccountGetEncryptionScope.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_management_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_management_policy.py index 9802c7960865..6feee2485aae 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_management_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_management_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.management_policies.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetManagementPolicy.json +# x-ms-original-file: 2025-08-01/StorageAccountGetManagementPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_migration_failed.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_migration_failed.py index 2e14190abf60..60fbcd7c4f96 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_migration_failed.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_migration_failed.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.get_customer_initiated_migration( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetMigrationFailed.json +# x-ms-original-file: 2025-08-01/StorageAccountGetMigrationFailed.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_migration_in_progress.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_migration_in_progress.py index 42934aa000eb..ee6b227d3044 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_migration_in_progress.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_migration_in_progress.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.get_customer_initiated_migration( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetMigrationInProgress.json +# x-ms-original-file: 2025-08-01/StorageAccountGetMigrationInProgress.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_object_replication_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_object_replication_policy.py index b8539b351368..3a50c4ec7ad8 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_object_replication_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_object_replication_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.object_replication_policies.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetObjectReplicationPolicy.json +# x-ms-original-file: 2025-08-01/StorageAccountGetObjectReplicationPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_private_endpoint_connection.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_private_endpoint_connection.py index 74d2cb57b8f4..676726095e9d 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_private_endpoint_connection.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_private_endpoint_connection.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.private_endpoint_connections.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetPrivateEndpointConnection.json +# x-ms-original-file: 2025-08-01/StorageAccountGetPrivateEndpointConnection.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties.py index 141bd3b850c9..57858e5ce5ea 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.get_properties( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetProperties.json +# x-ms-original-file: 2025-08-01/StorageAccountGetProperties.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_cmk_enabled.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_cmk_enabled.py index 51d781128393..22395e51b306 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_cmk_enabled.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_cmk_enabled.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.get_properties( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetPropertiesCMKEnabled.json +# x-ms-original-file: 2025-08-01/StorageAccountGetPropertiesCMKEnabled.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_cmk_version_expiration_time.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_cmk_version_expiration_time.py index cef1d9d17f38..98c8e4fbe0c5 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_cmk_version_expiration_time.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_cmk_version_expiration_time.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.get_properties( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetPropertiesCMKVersionExpirationTime.json +# x-ms-original-file: 2025-08-01/StorageAccountGetPropertiesCMKVersionExpirationTime.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_geo_replication_statscan_failover_false.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_geo_replication_statscan_failover_false.py index 4b18867fc264..7c1066818732 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_geo_replication_statscan_failover_false.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_geo_replication_statscan_failover_false.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.get_properties( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetPropertiesGeoReplicationStatscanFailoverFalse.json +# x-ms-original-file: 2025-08-01/StorageAccountGetPropertiesGeoReplicationStatscanFailoverFalse.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_geo_replication_statscan_failover_true.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_geo_replication_statscan_failover_true.py index a7d07bceb7db..238dd153e62c 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_geo_replication_statscan_failover_true.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_get_properties_geo_replication_statscan_failover_true.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.get_properties( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountGetPropertiesGeoReplicationStatscanFailoverTrue.json +# x-ms-original-file: 2025-08-01/StorageAccountGetPropertiesGeoReplicationStatscanFailoverTrue.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_hierarchical_namespace_migration.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_hierarchical_namespace_migration.py index b5e7c10c9554..14c3ea61bd08 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_hierarchical_namespace_migration.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_hierarchical_namespace_migration.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.storage_accounts.begin_hierarchical_namespace_migration( @@ -38,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountHierarchicalNamespaceMigration.json +# x-ms-original-file: 2025-08-01/StorageAccountHierarchicalNamespaceMigration.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_leverage_ipv6_ability.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_leverage_ipv6_ability.py index b20567e77f06..3877b5613c5a 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_leverage_ipv6_ability.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_leverage_ipv6_ability.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.update( @@ -47,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountLeverageIPv6Ability.json +# x-ms-original-file: 2025-08-01/StorageAccountLeverageIPv6Ability.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list.py index f1a1a4fcf57c..6de1f94f411d 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.list() @@ -36,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountList.json +# x-ms-original-file: 2025-08-01/StorageAccountList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_account_sas.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_account_sas.py index f6c286ec397c..4ea3c19bddc1 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_account_sas.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_account_sas.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.list_account_sas( @@ -47,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountListAccountSAS.json +# x-ms-original-file: 2025-08-01/StorageAccountListAccountSAS.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_blob_inventory_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_blob_inventory_policy.py index 37be257814dd..9cb911728af0 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_blob_inventory_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_blob_inventory_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_inventory_policies.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountListBlobInventoryPolicy.json +# x-ms-original-file: 2025-08-01/StorageAccountListBlobInventoryPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_by_resource_group.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_by_resource_group.py index 269522e7e510..b5f42bb323c4 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_by_resource_group.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_by_resource_group.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.list_by_resource_group( @@ -38,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountListByResourceGroup.json +# x-ms-original-file: 2025-08-01/StorageAccountListByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_keys.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_keys.py index 17356810bf33..f7d93a286b05 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_keys.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_keys.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.list_keys( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountListKeys.json +# x-ms-original-file: 2025-08-01/StorageAccountListKeys.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_location_usage.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_location_usage.py index 631021c21821..95032841b2ec 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_location_usage.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_location_usage.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.usages.list_by_location( @@ -38,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountListLocationUsage.json +# x-ms-original-file: 2025-08-01/StorageAccountListLocationUsage.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_object_replication_policies.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_object_replication_policies.py index 1a4e1d0e5170..8942867cf3a8 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_object_replication_policies.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_object_replication_policies.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.object_replication_policies.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountListObjectReplicationPolicies.json +# x-ms-original-file: 2025-08-01/StorageAccountListObjectReplicationPolicies.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_private_endpoint_connections.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_private_endpoint_connections.py index 17cdb678c8d9..4ec130d08023 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_private_endpoint_connections.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_private_endpoint_connections.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.private_endpoint_connections.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountListPrivateEndpointConnections.json +# x-ms-original-file: 2025-08-01/StorageAccountListPrivateEndpointConnections.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_private_link_resources.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_private_link_resources.py index 1dec14f9465c..66a734623778 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_private_link_resources.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_private_link_resources.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.private_link_resources.list_by_storage_account( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountListPrivateLinkResources.json +# x-ms-original-file: 2025-08-01/StorageAccountListPrivateLinkResources.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_service_sas.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_service_sas.py index e3710866c82b..756fa467bee3 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_service_sas.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_list_service_sas.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.list_service_sas( @@ -44,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountListServiceSAS.json +# x-ms-original-file: 2025-08-01/StorageAccountListServiceSAS.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_patch_encryption_scope.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_patch_encryption_scope.py index 140793c2abb9..222024051206 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_patch_encryption_scope.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_patch_encryption_scope.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.encryption_scopes.patch( @@ -45,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountPatchEncryptionScope.json +# x-ms-original-file: 2025-08-01/StorageAccountPatchEncryptionScope.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_post_migration.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_post_migration.py index 51e2a5142587..d59a3842ea4d 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_post_migration.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_post_migration.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.storage_accounts.begin_customer_initiated_migration( @@ -38,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountPostMigration.json +# x-ms-original-file: 2025-08-01/StorageAccountPostMigration.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_put_encryption_scope.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_put_encryption_scope.py index 78479e545cb7..7846913792d2 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_put_encryption_scope.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_put_encryption_scope.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.encryption_scopes.put( @@ -36,24 +35,34 @@ def main(): account_name="accountname", encryption_scope_name="{encryption-scope-name}", encryption_scope={ - "creationTime": "2020-02-20 00:00:00", "id": "str", - "keyVaultProperties": { - "currentVersionedKeyIdentifier": "str", - "keyUri": "str", - "lastKeyRotationTimestamp": "2020-02-20 00:00:00", - }, - "lastModifiedTime": "2020-02-20 00:00:00", "name": "str", - "requireInfrastructureEncryption": bool, - "source": "str", - "state": "str", + "properties": { + "creationTime": "2020-02-20 00:00:00", + "keyVaultProperties": { + "currentVersionedKeyIdentifier": "str", + "keyUri": "str", + "lastKeyRotationTimestamp": "2020-02-20 00:00:00", + }, + "lastModifiedTime": "2020-02-20 00:00:00", + "requireInfrastructureEncryption": bool, + "source": "str", + "state": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountPutEncryptionScope.json +# x-ms-original-file: 2025-08-01/StorageAccountPutEncryptionScope.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_put_encryption_scope_with_infrastructure_encryption.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_put_encryption_scope_with_infrastructure_encryption.py index fc4067652b2e..ea464c6d5f7b 100644 --- 
a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_put_encryption_scope_with_infrastructure_encryption.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_put_encryption_scope_with_infrastructure_encryption.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.encryption_scopes.put( @@ -40,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountPutEncryptionScopeWithInfrastructureEncryption.json +# x-ms-original-file: 2025-08-01/StorageAccountPutEncryptionScopeWithInfrastructureEncryption.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_put_private_endpoint_connection.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_put_private_endpoint_connection.py index e0933814d416..c497c5edf51b 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_put_private_endpoint_connection.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_put_private_endpoint_connection.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # 
-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.private_endpoint_connections.put( @@ -42,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountPutPrivateEndpointConnection.json +# x-ms-original-file: 2025-08-01/StorageAccountPutPrivateEndpointConnection.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_regenerate_kerb_key.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_regenerate_kerb_key.py index b54a89b76deb..af47cb4d0fad 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_regenerate_kerb_key.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_regenerate_kerb_key.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.regenerate_key( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountRegenerateKerbKey.json +# x-ms-original-file: 2025-08-01/StorageAccountRegenerateKerbKey.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_regenerate_key.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_regenerate_key.py index a2ceae1f412b..c1e31908ee0b 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_regenerate_key.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_regenerate_key.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.regenerate_key( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountRegenerateKey.json +# x-ms-original-file: 2025-08-01/StorageAccountRegenerateKey.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_revoke_user_delegation_keys.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_revoke_user_delegation_keys.py index f3b9bb7aaaf4..341ec5f461f2 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_revoke_user_delegation_keys.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_revoke_user_delegation_keys.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.storage_accounts.revoke_user_delegation_keys( @@ -37,6 +36,6 @@ def main(): ) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountRevokeUserDelegationKeys.json +# x-ms-original-file: 2025-08-01/StorageAccountRevokeUserDelegationKeys.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_blob_inventory_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_blob_inventory_policy.py index 1ac53e9bae9f..b5c43000c2ac 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_blob_inventory_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_blob_inventory_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_inventory_policies.create_or_update( @@ -101,6 +100,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountSetBlobInventoryPolicy.json +# x-ms-original-file: 2025-08-01/StorageAccountSetBlobInventoryPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_blob_inventory_policy_include_delete_and_new_schema_for_hns_account.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_blob_inventory_policy_include_delete_and_new_schema_for_hns_account.py index 5fd411d9d53f..f135f007ca07 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_blob_inventory_policy_include_delete_and_new_schema_for_hns_account.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_blob_inventory_policy_include_delete_and_new_schema_for_hns_account.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_inventory_policies.create_or_update( @@ -119,6 +118,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountSetBlobInventoryPolicyIncludeDeleteAndNewSchemaForHnsAccount.json +# x-ms-original-file: 2025-08-01/StorageAccountSetBlobInventoryPolicyIncludeDeleteAndNewSchemaForHnsAccount.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_blob_inventory_policy_include_delete_and_new_schema_for_non_hns_account.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_blob_inventory_policy_include_delete_and_new_schema_for_non_hns_account.py index c18934d3fc88..27d10eed6e95 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_blob_inventory_policy_include_delete_and_new_schema_for_non_hns_account.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_blob_inventory_policy_include_delete_and_new_schema_for_non_hns_account.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.blob_inventory_policies.create_or_update( @@ -118,6 +117,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountSetBlobInventoryPolicyIncludeDeleteAndNewSchemaForNonHnsAccount.json +# x-ms-original-file: 2025-08-01/StorageAccountSetBlobInventoryPolicyIncludeDeleteAndNewSchemaForNonHnsAccount.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy.py index 26bcab69caf2..017d0108f09e 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.management_policies.create_or_update( @@ -85,6 +84,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountSetManagementPolicy.json +# x-ms-original-file: 2025-08-01/StorageAccountSetManagementPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_base_blob_days_after_creation_actions.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_base_blob_days_after_creation_actions.py index 0f1f976ac445..448c278363a3 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_base_blob_days_after_creation_actions.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_base_blob_days_after_creation_actions.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.management_policies.create_or_update( @@ -62,6 +61,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountSetManagementPolicy_BaseBlobDaysAfterCreationActions.json +# x-ms-original-file: 2025-08-01/StorageAccountSetManagementPolicy_BaseBlobDaysAfterCreationActions.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_cold_tier_actions.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_cold_tier_actions.py index f38e74cc6041..c30dbc4493b9 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_cold_tier_actions.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_cold_tier_actions.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.management_policies.create_or_update( @@ -71,6 +70,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountSetManagementPolicyColdTierActions.json +# x-ms-original-file: 2025-08-01/StorageAccountSetManagementPolicyColdTierActions.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_for_block_and_append_blobs.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_for_block_and_append_blobs.py index 9b46a461fd64..ee718d570033 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_for_block_and_append_blobs.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_for_block_and_append_blobs.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.management_policies.create_or_update( @@ -63,6 +62,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountSetManagementPolicyForBlockAndAppendBlobs.json +# x-ms-original-file: 2025-08-01/StorageAccountSetManagementPolicyForBlockAndAppendBlobs.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_hot_tier_actions.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_hot_tier_actions.py index 827990a915e5..62ed3fa76627 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_hot_tier_actions.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_hot_tier_actions.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.management_policies.create_or_update( @@ -60,6 +59,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountSetManagementPolicyHotTierActions.json +# x-ms-original-file: 2025-08-01/StorageAccountSetManagementPolicyHotTierActions.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_last_access_time_based_blob_actions.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_last_access_time_based_blob_actions.py index cf6783fe3966..d5da493cd978 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_last_access_time_based_blob_actions.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_last_access_time_based_blob_actions.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.management_policies.create_or_update( @@ -64,6 +63,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountSetManagementPolicy_LastAccessTimeBasedBlobActions.json +# x-ms-original-file: 2025-08-01/StorageAccountSetManagementPolicy_LastAccessTimeBasedBlobActions.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_last_tier_change_time_actions.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_last_tier_change_time_actions.py index 2ef9b569ef8b..b307b2a83d46 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_last_tier_change_time_actions.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_last_tier_change_time_actions.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.management_policies.create_or_update( @@ -77,6 +76,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountSetManagementPolicy_LastTierChangeTimeActions.json +# x-ms-original-file: 2025-08-01/StorageAccountSetManagementPolicy_LastTierChangeTimeActions.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_with_snapshot_and_version.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_with_snapshot_and_version.py index e7cbbb061536..0cb71111fc3d 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_with_snapshot_and_version.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_set_management_policy_with_snapshot_and_version.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.management_policies.create_or_update( @@ -72,6 +71,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountSetManagementPolicyWithSnapshotAndVersion.json +# x-ms-original-file: 2025-08-01/StorageAccountSetManagementPolicyWithSnapshotAndVersion.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update.py index 92e1afac3c14..8622528cd7e5 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.update( @@ -38,6 +38,12 @@ def main(): "properties": { "allowBlobPublicAccess": False, "allowSharedKeyAccess": True, + "allowSharedKeyAccessForServices": { + "blob": {"enabled": True}, + "file": {"enabled": False}, + "queue": {"enabled": True}, + "table": {"enabled": False}, + }, "defaultToOAuthAuthentication": False, "enableExtendedGroups": True, "encryption": { @@ -73,6 +79,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountUpdate.json +# x-ms-original-file: 2025-08-01/StorageAccountUpdate.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_access_tier_to_smart.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_access_tier_to_smart.py new file mode 100644 index 000000000000..af29e5f1ce42 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_access_tier_to_smart.py @@ -0,0 +1,79 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_account_update_access_tier_to_smart.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_accounts.update( + resource_group_name="res9407", + account_name="sto8596", + parameters={ + "properties": { + "accessTier": "Smart", + "allowBlobPublicAccess": False, + "allowSharedKeyAccess": True, + "defaultToOAuthAuthentication": False, + "enableExtendedGroups": True, + "encryption": { + "keySource": "Microsoft.Storage", + "services": { + "blob": {"enabled": True, "keyType": "Account"}, + "file": {"enabled": True, "keyType": "Account"}, + }, + }, + "geoPriorityReplicationStatus": {"isBlobEnabled": True}, + "isLocalUserEnabled": True, + "isSftpEnabled": True, + "keyPolicy": {"keyExpirationPeriodInDays": 20}, + "minimumTlsVersion": "TLS1_2", + "networkAcls": { + "defaultAction": "Allow", + "resourceAccessRules": [ + { + "resourceId": "/subscriptions/a7e99807-abbf-4642-bdec-2c809a96a8bc/resourceGroups/res9407/providers/Microsoft.Synapse/workspaces/testworkspace", + "tenantId": "72f988bf-86f1-41af-91ab-2d7cd011db47", + } + ], + }, + "routingPreference": { + "publishInternetEndpoints": True, + "publishMicrosoftEndpoints": True, + "routingChoice": "MicrosoftRouting", + }, + "sasPolicy": {"expirationAction": "Log", 
"sasExpirationPeriod": "1.15:59:59"}, + } + }, + ) + print(response) + + +# x-ms-original-file: 2025-08-01/StorageAccountUpdateAccessTierToSmart.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_allowed_copy_scope_to_aad.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_allowed_copy_scope_to_aad.py index da260892a881..995e0f809f81 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_allowed_copy_scope_to_aad.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_allowed_copy_scope_to_aad.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.update( @@ -69,6 +69,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountUpdateAllowedCopyScopeToAAD.json +# x-ms-original-file: 2025-08-01/StorageAccountUpdateAllowedCopyScopeToAAD.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_disable_public_network_access.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_disable_public_network_access.py index 3d5d29a64de2..f5694a29f8da 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_disable_public_network_access.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_disable_public_network_access.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.update( @@ -69,6 +69,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountUpdateDisablePublicNetworkAccess.json +# x-ms-original-file: 2025-08-01/StorageAccountUpdateDisablePublicNetworkAccess.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_object_replication_policy_on_destination.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_object_replication_policy_on_destination.py index 3f62a12d28de..63d5bb7f5024 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_object_replication_policy_on_destination.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_object_replication_policy_on_destination.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.object_replication_policies.create_or_update( @@ -50,12 +49,13 @@ def main(): {"destinationContainer": "dcont179", "sourceContainer": "scont179"}, ], "sourceAccount": "src1122", + "tagsReplication": {"enabled": True}, } }, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountUpdateObjectReplicationPolicyOnDestination.json +# x-ms-original-file: 2025-08-01/StorageAccountUpdateObjectReplicationPolicyOnDestination.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_object_replication_policy_on_source.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_object_replication_policy_on_source.py index 9f442189fc4a..6d1b801c51f1 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_object_replication_policy_on_source.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_object_replication_policy_on_source.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.object_replication_policies.create_or_update( @@ -54,12 +53,13 @@ def main(): }, ], "sourceAccount": "src1122", + "tagsReplication": {"enabled": True}, } }, ) print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountUpdateObjectReplicationPolicyOnSource.json +# x-ms-original-file: 2025-08-01/StorageAccountUpdateObjectReplicationPolicyOnSource.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_placement.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_placement.py index 7c69cc332c79..443596d0036c 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_placement.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_placement.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.update( @@ -73,6 +73,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountUpdate_placement.json +# x-ms-original-file: 2025-08-01/StorageAccountUpdate_placement.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_user_assigned_encryption_identity_with_cmk.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_user_assigned_encryption_identity_with_cmk.py index 1de04d3625db..9a891dad1033 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_user_assigned_encryption_identity_with_cmk.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_user_assigned_encryption_identity_with_cmk.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.update( @@ -65,6 +65,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountUpdateUserAssignedEncryptionIdentityWithCMK.json +# x-ms-original-file: 2025-08-01/StorageAccountUpdateUserAssignedEncryptionIdentityWithCMK.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_user_assigned_identity_with_federated_identity_client_id.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_user_assigned_identity_with_federated_identity_client_id.py index b2b22df1aef2..0815ef48e535 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_user_assigned_identity_with_federated_identity_client_id.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_user_assigned_identity_with_federated_identity_client_id.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.update( @@ -66,6 +66,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountUpdateUserAssignedIdentityWithFederatedIdentityClientId.json +# x-ms-original-file: 2025-08-01/StorageAccountUpdateUserAssignedIdentityWithFederatedIdentityClientId.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_with_data_collaboration_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_with_data_collaboration_policy.py new file mode 100644 index 000000000000..f54fd61f30c8 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_with_data_collaboration_policy.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_account_update_with_data_collaboration_policy.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_accounts.update( + resource_group_name="res9407", + account_name="sto8596", + parameters={ + "properties": { + "dataCollaborationPolicyProperties": { + "allowCrossTenantDataSharing": False, + "allowStorageConnectors": True, + "allowStorageDataShares": True, + } + } + }, + ) + print(response) + + +# x-ms-original-file: 2025-08-01/StorageAccountUpdateWithDataCollaborationPolicy.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_with_immutability_policy.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_with_immutability_policy.py index d6802cbbadb7..a911dfeb8530 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_with_immutability_policy.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_with_immutability_policy.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.update( @@ -50,6 +49,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountUpdateWithImmutabilityPolicy.json +# x-ms-original-file: 2025-08-01/StorageAccountUpdateWithImmutabilityPolicy.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_zones.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_zones.py index ba8bcb992ddd..b4b03c72e4e0 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_zones.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_account_update_zones.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.storage_accounts.update( @@ -73,6 +73,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/StorageAccountUpdate_zones.json +# x-ms-original-file: 2025-08-01/StorageAccountUpdate_zones.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_create.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_create.py new file mode 100644 index 000000000000..e3cff51d4cc0 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_create.py @@ -0,0 +1,63 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_connectors_create.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.connectors.begin_create( + resource_group_name="testrg", + account_name="teststorageaccount", + connector_name="testconnector", + resource={ + "location": "eastus", + "properties": { + "dataSourceType": "Azure_DataShare", + "description": "Example connector", + "source": { + "authProperties": { + "identityResourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/testrg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/testIdentity", + "type": "ManagedIdentity", + }, + "connection": { + "dataShareUri": "azds://eastus:datashare1:12345678-1234-1234-1234-123456789123", + "type": "DataShare", + }, + "type": "DataShare", + }, + "state": "Active", + }, + }, + ).result() + print(response) + + +# x-ms-original-file: 2025-08-01/StorageConnectorCRUD/StorageConnectors_Create.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_delete.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_delete.py new file mode 100644 index 000000000000..18ca80b96fa4 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_delete.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_connectors_delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.connectors.begin_delete( + resource_group_name="testrg", + account_name="teststorageaccount", + connector_name="testconnector", + ).result() + + +# x-ms-original-file: 2025-08-01/StorageConnectorCRUD/StorageConnectors_Delete.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_get.py new file mode 100644 index 000000000000..7a73d971421f --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_get.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_connectors_get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.connectors.get( + resource_group_name="testrg", + account_name="teststorageaccount", + connector_name="testconnector", + ) + print(response) + + +# x-ms-original-file: 2025-08-01/StorageConnectorCRUD/StorageConnectors_Get.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_list_by_storage_account.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_list_by_storage_account.py new file mode 100644 index 000000000000..85e29a729a7b --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_list_by_storage_account.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_connectors_list_by_storage_account.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.connectors.list_by_storage_account( + resource_group_name="testrg", + account_name="teststorageaccount", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2025-08-01/StorageConnectorCRUD/StorageConnectors_ListByStorageAccount.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_test_existing_connection.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_test_existing_connection.py new file mode 100644 index 000000000000..dc3416cf7f7e --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_test_existing_connection.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_connectors_test_existing_connection.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.connectors.begin_test_existing_connection( + resource_group_name="testrg", + account_name="teststorageaccount", + connector_name="testconnector", + body={"uniqueId": "12345678-1234-1234-1234-12345678912"}, + ).result() + print(response) + + +# x-ms-original-file: 2025-08-01/StorageConnectorCRUD/StorageConnectors_TestExistingConnection.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_update.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_update.py new file mode 100644 index 000000000000..9f3a5f32dafa --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_connectors_update.py @@ -0,0 +1,55 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_connectors_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.connectors.begin_update( + resource_group_name="testrg", + account_name="teststorageaccount", + connector_name="testconnector", + properties={ + "properties": { + "source": { + "authProperties": { + "identityResourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/testrg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/newTestIdentity", + "type": "ManagedIdentity", + }, + "type": "DataShare", + } + } + }, + ).result() + print(response) + + +# x-ms-original-file: 2025-08-01/StorageConnectorCRUD/StorageConnectors_Update.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_create.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_create.py new file mode 100644 index 000000000000..e6f57533976a --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_create.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_data_shares_create.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.data_shares.begin_create( + resource_group_name="testrg", + account_name="teststorageaccount", + data_share_name="testdatashare", + resource={ + "location": "eastus", + "properties": { + "accessPolicies": [ + { + "permission": "Read", + "principalId": "00000000-0000-0000-0000-000000000000", + "tenantId": "00000000-0000-0000-0000-000000000000", + } + ], + "assets": [{"assetPath": "/container/folder/foo", "displayName": "virtualFoo"}], + "description": "Dummy data share", + }, + }, + ).result() + print(response) + + +# x-ms-original-file: 2025-08-01/StorageDataShareCRUD/StorageDataShares_Create.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_delete.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_delete.py new file mode 100644 index 000000000000..bb72bc32c593 --- /dev/null +++ 
b/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_delete.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_data_shares_delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.data_shares.begin_delete( + resource_group_name="testrg", + account_name="teststorageaccount", + data_share_name="testdatashare", + ).result() + + +# x-ms-original-file: 2025-08-01/StorageDataShareCRUD/StorageDataShares_Delete.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_get.py new file mode 100644 index 000000000000..297ade651ea8 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_get.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_data_shares_get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.data_shares.get( + resource_group_name="testrg", + account_name="teststorageaccount", + data_share_name="testdatashare", + ) + print(response) + + +# x-ms-original-file: 2025-08-01/StorageDataShareCRUD/StorageDataShares_Get.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_list_by_storage_account.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_list_by_storage_account.py new file mode 100644 index 000000000000..0166872bce89 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_list_by_storage_account.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_data_shares_list_by_storage_account.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.data_shares.list_by_storage_account( + resource_group_name="testrg", + account_name="teststorageaccount", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2025-08-01/StorageDataShareCRUD/StorageDataShares_ListByStorageAccount.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_update.py b/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_update.py new file mode 100644 index 000000000000..730dceebd9a3 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/storage_data_shares_update.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python storage_data_shares_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.data_shares.begin_update( + resource_group_name="testrg", + account_name="teststorageaccount", + data_share_name="testdatashare", + properties={ + "properties": { + "accessPolicies": [ + { + "permission": "Read", + "principalId": "00000000-0000-0000-0000-123456781234", + "tenantId": "00000000-0000-0000-0000-987654321987", + } + ], + "assets": [{"assetPath": "/container/folder1/bar", "displayName": "virtualBar"}], + "description": "New dummy data share", + } + }, + ).result() + print(response) + + +# x-ms-original-file: 2025-08-01/StorageDataShareCRUD/StorageDataShares_Update.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_delete.py b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_delete.py index a3d634c2a613..32a001ad8ac1 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_delete.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_delete.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) client.table.delete( @@ -38,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/TableOperationDelete.json +# x-ms-original-file: 2025-08-01/TableOperationDelete.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_get.py index 5b50290d85c4..aa39c4655500 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_get.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_get.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.table.get( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/TableOperationGet.json +# x-ms-original-file: 2025-08-01/TableOperationGet.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_list.py index 4e227f782c0a..20e2fd2af975 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.table.list( @@ -39,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/TableOperationList.json +# x-ms-original-file: 2025-08-01/TableOperationList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_patch.py b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_patch.py index 7d6dde706d4e..d078beac33f1 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_patch.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_patch.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.table.update( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/TableOperationPatch.json +# x-ms-original-file: 2025-08-01/TableOperationPatch.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_put.py b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_put.py index a7767c343ad3..47c042af47e2 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_put.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_put.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.table.create( @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/TableOperationPut.json +# x-ms-original-file: 2025-08-01/TableOperationPut.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_put_or_patch_acls.py b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_put_or_patch_acls.py index 64dc33ddff8e..d7f9dd42fa5d 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_put_or_patch_acls.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_put_or_patch_acls.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,10 +27,10 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) - response = client.table.create( + response = client.table.update( resource_group_name="res3376", account_name="sto328", table_name="table6185", @@ -39,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/TableOperationPutOrPatchAcls.json +# x-ms-original-file: 2025-08-01/TableOperationPutOrPatchAcls.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_put_or_patch_acls_table_create.py b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_put_or_patch_acls_table_create.py new file mode 100644 index 000000000000..5fe18423f2a6 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_samples/table_operation_put_or_patch_acls_table_create.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storage import StorageManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storage +# USAGE + python table_operation_put_or_patch_acls_table_create.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageManagementClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.table.create( + resource_group_name="res3376", + account_name="sto328", + table_name="table6185", + ) + print(response) + + +# x-ms-original-file: 2025-08-01/TableOperationPutOrPatchAclsTableCreate.json +if __name__ == "__main__": + main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/table_services_get.py b/sdk/storage/azure-mgmt-storage/generated_samples/table_services_get.py index fa8c6c49db99..06cae9314d86 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/table_services_get.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/table_services_get.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.table_services.get_service_properties( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/TableServicesGet.json +# x-ms-original-file: 2025-08-01/TableServicesGet.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/table_services_list.py b/sdk/storage/azure-mgmt-storage/generated_samples/table_services_list.py index bfd863196a5f..519a8c77fcfe 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/table_services_list.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/table_services_list.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.table_services.list( @@ -38,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/TableServicesList.json +# x-ms-original-file: 2025-08-01/TableServicesList.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_samples/table_services_put.py b/sdk/storage/azure-mgmt-storage/generated_samples/table_services_put.py index 3dba3834c09b..85ff10d235a2 100644 --- a/sdk/storage/azure-mgmt-storage/generated_samples/table_services_put.py +++ b/sdk/storage/azure-mgmt-storage/generated_samples/table_services_put.py @@ -1,9 +1,8 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -28,7 +27,7 @@ def main(): client = StorageManagementClient( credential=DefaultAzureCredential(), - subscription_id="{subscription-id}", + subscription_id="SUBSCRIPTION_ID", ) response = client.table_services.set_service_properties( @@ -67,6 +66,6 @@ def main(): print(response) -# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2025-06-01/examples/TableServicesPut.json +# x-ms-original-file: 2025-08-01/TableServicesPut.json if __name__ == "__main__": main() diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/conftest.py b/sdk/storage/azure-mgmt-storage/generated_tests/conftest.py index 167edda55fba..b0be7485cf2e 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/conftest.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/conftest.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import os diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_containers_operations.py index 414ef5d5b121..c89a7b2c2b05 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_containers_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_containers_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -20,13 +20,13 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_containers_list(self, resource_group): - response = self.client.blob_containers.list( + def test_blob_containers_get(self, resource_group): + response = self.client.blob_containers.get( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", + container_name="str", ) - result = [r for r in response] + # please add some check logic here by yourself # ... 
@@ -38,68 +38,79 @@ def test_blob_containers_create(self, resource_group): account_name="str", container_name="str", blob_container={ - "defaultEncryptionScope": "str", - "deleted": bool, - "deletedTime": "2020-02-20 00:00:00", - "denyEncryptionScopeOverride": bool, - "enableNfsV3AllSquash": bool, - "enableNfsV3RootSquash": bool, "etag": "str", - "hasImmutabilityPolicy": bool, - "hasLegalHold": bool, "id": "str", - "immutabilityPolicy": { - "allowProtectedAppendWrites": bool, - "allowProtectedAppendWritesAll": bool, - "etag": "str", - "immutabilityPeriodSinceCreationInDays": 0, - "state": "str", - "updateHistory": [ - { + "name": "str", + "properties": { + "defaultEncryptionScope": "str", + "deleted": bool, + "deletedTime": "2020-02-20 00:00:00", + "denyEncryptionScopeOverride": bool, + "enableNfsV3AllSquash": bool, + "enableNfsV3RootSquash": bool, + "hasImmutabilityPolicy": bool, + "hasLegalHold": bool, + "immutabilityPolicy": { + "etag": "str", + "properties": { "allowProtectedAppendWrites": bool, "allowProtectedAppendWritesAll": bool, "immutabilityPeriodSinceCreationInDays": 0, - "objectIdentifier": "str", - "tenantId": "str", - "timestamp": "2020-02-20 00:00:00", - "update": "str", - "upn": "str", - } - ], - }, - "immutableStorageWithVersioning": { - "enabled": bool, - "migrationState": "str", - "timeStamp": "2020-02-20 00:00:00", - }, - "lastModifiedTime": "2020-02-20 00:00:00", - "leaseDuration": "str", - "leaseState": "str", - "leaseStatus": "str", - "legalHold": { - "hasLegalHold": bool, - "protectedAppendWritesHistory": { - "allowProtectedAppendWritesAll": bool, - "timestamp": "2020-02-20 00:00:00", + "state": "str", + }, + "updateHistory": [ + { + "allowProtectedAppendWrites": bool, + "allowProtectedAppendWritesAll": bool, + "immutabilityPeriodSinceCreationInDays": 0, + "objectIdentifier": "str", + "tenantId": "str", + "timestamp": "2020-02-20 00:00:00", + "update": "str", + "upn": "str", + } + ], + }, + "immutableStorageWithVersioning": { + "enabled": 
bool, + "migrationState": "str", + "timeStamp": "2020-02-20 00:00:00", }, - "tags": [ - { - "objectIdentifier": "str", - "tag": "str", - "tenantId": "str", + "lastModifiedTime": "2020-02-20 00:00:00", + "leaseDuration": "str", + "leaseState": "str", + "leaseStatus": "str", + "legalHold": { + "hasLegalHold": bool, + "protectedAppendWritesHistory": { + "allowProtectedAppendWritesAll": bool, "timestamp": "2020-02-20 00:00:00", - "upn": "str", - } - ], + }, + "tags": [ + { + "objectIdentifier": "str", + "tag": "str", + "tenantId": "str", + "timestamp": "2020-02-20 00:00:00", + "upn": "str", + } + ], + }, + "metadata": {"str": "str"}, + "publicAccess": "str", + "remainingRetentionDays": 0, + "version": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", }, - "metadata": {"str": "str"}, - "name": "str", - "publicAccess": "str", - "remainingRetentionDays": 0, "type": "str", - "version": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -113,68 +124,79 @@ def test_blob_containers_update(self, resource_group): account_name="str", container_name="str", blob_container={ - "defaultEncryptionScope": "str", - "deleted": bool, - "deletedTime": "2020-02-20 00:00:00", - "denyEncryptionScopeOverride": bool, - "enableNfsV3AllSquash": bool, - "enableNfsV3RootSquash": bool, "etag": "str", - "hasImmutabilityPolicy": bool, - "hasLegalHold": bool, "id": "str", - "immutabilityPolicy": { - "allowProtectedAppendWrites": bool, - "allowProtectedAppendWritesAll": bool, - "etag": "str", - "immutabilityPeriodSinceCreationInDays": 0, - "state": "str", - "updateHistory": [ - { + "name": "str", + "properties": { + "defaultEncryptionScope": "str", + "deleted": bool, + "deletedTime": "2020-02-20 00:00:00", + "denyEncryptionScopeOverride": bool, + "enableNfsV3AllSquash": bool, + 
"enableNfsV3RootSquash": bool, + "hasImmutabilityPolicy": bool, + "hasLegalHold": bool, + "immutabilityPolicy": { + "etag": "str", + "properties": { "allowProtectedAppendWrites": bool, "allowProtectedAppendWritesAll": bool, "immutabilityPeriodSinceCreationInDays": 0, - "objectIdentifier": "str", - "tenantId": "str", - "timestamp": "2020-02-20 00:00:00", - "update": "str", - "upn": "str", - } - ], - }, - "immutableStorageWithVersioning": { - "enabled": bool, - "migrationState": "str", - "timeStamp": "2020-02-20 00:00:00", - }, - "lastModifiedTime": "2020-02-20 00:00:00", - "leaseDuration": "str", - "leaseState": "str", - "leaseStatus": "str", - "legalHold": { - "hasLegalHold": bool, - "protectedAppendWritesHistory": { - "allowProtectedAppendWritesAll": bool, - "timestamp": "2020-02-20 00:00:00", + "state": "str", + }, + "updateHistory": [ + { + "allowProtectedAppendWrites": bool, + "allowProtectedAppendWritesAll": bool, + "immutabilityPeriodSinceCreationInDays": 0, + "objectIdentifier": "str", + "tenantId": "str", + "timestamp": "2020-02-20 00:00:00", + "update": "str", + "upn": "str", + } + ], }, - "tags": [ - { - "objectIdentifier": "str", - "tag": "str", - "tenantId": "str", + "immutableStorageWithVersioning": { + "enabled": bool, + "migrationState": "str", + "timeStamp": "2020-02-20 00:00:00", + }, + "lastModifiedTime": "2020-02-20 00:00:00", + "leaseDuration": "str", + "leaseState": "str", + "leaseStatus": "str", + "legalHold": { + "hasLegalHold": bool, + "protectedAppendWritesHistory": { + "allowProtectedAppendWritesAll": bool, "timestamp": "2020-02-20 00:00:00", - "upn": "str", - } - ], + }, + "tags": [ + { + "objectIdentifier": "str", + "tag": "str", + "tenantId": "str", + "timestamp": "2020-02-20 00:00:00", + "upn": "str", + } + ], + }, + "metadata": {"str": "str"}, + "publicAccess": "str", + "remainingRetentionDays": 0, + "version": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + 
"lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", }, - "metadata": {"str": "str"}, - "name": "str", - "publicAccess": "str", - "remainingRetentionDays": 0, "type": "str", - "version": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -182,12 +204,11 @@ def test_blob_containers_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_containers_get(self, resource_group): - response = self.client.blob_containers.get( + def test_blob_containers_delete(self, resource_group): + response = self.client.blob_containers.delete( resource_group_name=resource_group.name, account_name="str", container_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -195,12 +216,12 @@ def test_blob_containers_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_containers_delete(self, resource_group): - response = self.client.blob_containers.delete( + def test_blob_containers_set_legal_hold(self, resource_group): + response = self.client.blob_containers.set_legal_hold( resource_group_name=resource_group.name, account_name="str", container_name="str", - api_version="2025-06-01", + legal_hold={"tags": ["str"], "allowProtectedAppendWritesAll": bool, "hasLegalHold": bool}, ) # please add some check logic here by yourself @@ -208,13 +229,12 @@ def test_blob_containers_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_containers_set_legal_hold(self, resource_group): - response = self.client.blob_containers.set_legal_hold( + def test_blob_containers_clear_legal_hold(self, resource_group): + response = self.client.blob_containers.clear_legal_hold( resource_group_name=resource_group.name, account_name="str", container_name="str", legal_hold={"tags": ["str"], 
"allowProtectedAppendWritesAll": bool, "hasLegalHold": bool}, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -222,13 +242,11 @@ def test_blob_containers_set_legal_hold(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_containers_clear_legal_hold(self, resource_group): - response = self.client.blob_containers.clear_legal_hold( + def test_blob_containers_lease(self, resource_group): + response = self.client.blob_containers.lease( resource_group_name=resource_group.name, account_name="str", container_name="str", - legal_hold={"tags": ["str"], "allowProtectedAppendWritesAll": bool, "hasLegalHold": bool}, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -236,42 +254,34 @@ def test_blob_containers_clear_legal_hold(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_containers_create_or_update_immutability_policy(self, resource_group): - response = self.client.blob_containers.create_or_update_immutability_policy( + def test_blob_containers_begin_object_level_worm(self, resource_group): + response = self.client.blob_containers.begin_object_level_worm( resource_group_name=resource_group.name, account_name="str", container_name="str", - immutability_policy_name="default", - api_version="2025-06-01", - ) + ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself # ... 
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_containers_get_immutability_policy(self, resource_group): - response = self.client.blob_containers.get_immutability_policy( + def test_blob_containers_list(self, resource_group): + response = self.client.blob_containers.list( resource_group_name=resource_group.name, account_name="str", - container_name="str", - immutability_policy_name="default", - api_version="2025-06-01", ) - + result = [r for r in response] # please add some check logic here by yourself # ... @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_containers_delete_immutability_policy(self, resource_group): - response = self.client.blob_containers.delete_immutability_policy( + def test_blob_containers_get_immutability_policy(self, resource_group): + response = self.client.blob_containers.get_immutability_policy( resource_group_name=resource_group.name, account_name="str", container_name="str", - if_match="str", - immutability_policy_name="default", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -279,13 +289,11 @@ def test_blob_containers_delete_immutability_policy(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_containers_lock_immutability_policy(self, resource_group): - response = self.client.blob_containers.lock_immutability_policy( + def test_blob_containers_create_or_update_immutability_policy(self, resource_group): + response = self.client.blob_containers.create_or_update_immutability_policy( resource_group_name=resource_group.name, account_name="str", container_name="str", - if_match="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -293,13 +301,13 @@ def test_blob_containers_lock_immutability_policy(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def 
test_blob_containers_extend_immutability_policy(self, resource_group): - response = self.client.blob_containers.extend_immutability_policy( + def test_blob_containers_delete_immutability_policy(self, resource_group): + response = self.client.blob_containers.delete_immutability_policy( resource_group_name=resource_group.name, account_name="str", container_name="str", - if_match="str", - api_version="2025-06-01", + etag="str", + match_condition=~azure.core.MatchConditions, ) # please add some check logic here by yourself @@ -307,12 +315,13 @@ def test_blob_containers_extend_immutability_policy(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_containers_lease(self, resource_group): - response = self.client.blob_containers.lease( + def test_blob_containers_lock_immutability_policy(self, resource_group): + response = self.client.blob_containers.lock_immutability_policy( resource_group_name=resource_group.name, account_name="str", container_name="str", - api_version="2025-06-01", + etag="str", + match_condition=~azure.core.MatchConditions, ) # please add some check logic here by yourself @@ -320,13 +329,14 @@ def test_blob_containers_lease(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_containers_begin_object_level_worm(self, resource_group): - response = self.client.blob_containers.begin_object_level_worm( + def test_blob_containers_extend_immutability_policy(self, resource_group): + response = self.client.blob_containers.extend_immutability_policy( resource_group_name=resource_group.name, account_name="str", container_name="str", - api_version="2025-06-01", - ).result() # call '.result()' to poll until service return final result + etag="str", + match_condition=~azure.core.MatchConditions, + ) # please add some check logic here by yourself # ... 
diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_containers_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_containers_operations_async.py index 0f4bfbed0528..6ceb576feba5 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_containers_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_containers_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -21,13 +21,13 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_blob_containers_list(self, resource_group): - response = self.client.blob_containers.list( + async def test_blob_containers_get(self, resource_group): + response = await self.client.blob_containers.get( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", + container_name="str", ) - result = [r async for r in response] + # please add some check logic here by yourself # ... 
@@ -39,68 +39,79 @@ async def test_blob_containers_create(self, resource_group): account_name="str", container_name="str", blob_container={ - "defaultEncryptionScope": "str", - "deleted": bool, - "deletedTime": "2020-02-20 00:00:00", - "denyEncryptionScopeOverride": bool, - "enableNfsV3AllSquash": bool, - "enableNfsV3RootSquash": bool, "etag": "str", - "hasImmutabilityPolicy": bool, - "hasLegalHold": bool, "id": "str", - "immutabilityPolicy": { - "allowProtectedAppendWrites": bool, - "allowProtectedAppendWritesAll": bool, - "etag": "str", - "immutabilityPeriodSinceCreationInDays": 0, - "state": "str", - "updateHistory": [ - { + "name": "str", + "properties": { + "defaultEncryptionScope": "str", + "deleted": bool, + "deletedTime": "2020-02-20 00:00:00", + "denyEncryptionScopeOverride": bool, + "enableNfsV3AllSquash": bool, + "enableNfsV3RootSquash": bool, + "hasImmutabilityPolicy": bool, + "hasLegalHold": bool, + "immutabilityPolicy": { + "etag": "str", + "properties": { "allowProtectedAppendWrites": bool, "allowProtectedAppendWritesAll": bool, "immutabilityPeriodSinceCreationInDays": 0, - "objectIdentifier": "str", - "tenantId": "str", - "timestamp": "2020-02-20 00:00:00", - "update": "str", - "upn": "str", - } - ], - }, - "immutableStorageWithVersioning": { - "enabled": bool, - "migrationState": "str", - "timeStamp": "2020-02-20 00:00:00", - }, - "lastModifiedTime": "2020-02-20 00:00:00", - "leaseDuration": "str", - "leaseState": "str", - "leaseStatus": "str", - "legalHold": { - "hasLegalHold": bool, - "protectedAppendWritesHistory": { - "allowProtectedAppendWritesAll": bool, - "timestamp": "2020-02-20 00:00:00", + "state": "str", + }, + "updateHistory": [ + { + "allowProtectedAppendWrites": bool, + "allowProtectedAppendWritesAll": bool, + "immutabilityPeriodSinceCreationInDays": 0, + "objectIdentifier": "str", + "tenantId": "str", + "timestamp": "2020-02-20 00:00:00", + "update": "str", + "upn": "str", + } + ], + }, + "immutableStorageWithVersioning": { + 
"enabled": bool, + "migrationState": "str", + "timeStamp": "2020-02-20 00:00:00", }, - "tags": [ - { - "objectIdentifier": "str", - "tag": "str", - "tenantId": "str", + "lastModifiedTime": "2020-02-20 00:00:00", + "leaseDuration": "str", + "leaseState": "str", + "leaseStatus": "str", + "legalHold": { + "hasLegalHold": bool, + "protectedAppendWritesHistory": { + "allowProtectedAppendWritesAll": bool, "timestamp": "2020-02-20 00:00:00", - "upn": "str", - } - ], + }, + "tags": [ + { + "objectIdentifier": "str", + "tag": "str", + "tenantId": "str", + "timestamp": "2020-02-20 00:00:00", + "upn": "str", + } + ], + }, + "metadata": {"str": "str"}, + "publicAccess": "str", + "remainingRetentionDays": 0, + "version": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", }, - "metadata": {"str": "str"}, - "name": "str", - "publicAccess": "str", - "remainingRetentionDays": 0, "type": "str", - "version": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -114,81 +125,79 @@ async def test_blob_containers_update(self, resource_group): account_name="str", container_name="str", blob_container={ - "defaultEncryptionScope": "str", - "deleted": bool, - "deletedTime": "2020-02-20 00:00:00", - "denyEncryptionScopeOverride": bool, - "enableNfsV3AllSquash": bool, - "enableNfsV3RootSquash": bool, "etag": "str", - "hasImmutabilityPolicy": bool, - "hasLegalHold": bool, "id": "str", - "immutabilityPolicy": { - "allowProtectedAppendWrites": bool, - "allowProtectedAppendWritesAll": bool, - "etag": "str", - "immutabilityPeriodSinceCreationInDays": 0, - "state": "str", - "updateHistory": [ - { + "name": "str", + "properties": { + "defaultEncryptionScope": "str", + "deleted": bool, + "deletedTime": "2020-02-20 00:00:00", + "denyEncryptionScopeOverride": bool, + "enableNfsV3AllSquash": bool, + 
"enableNfsV3RootSquash": bool, + "hasImmutabilityPolicy": bool, + "hasLegalHold": bool, + "immutabilityPolicy": { + "etag": "str", + "properties": { "allowProtectedAppendWrites": bool, "allowProtectedAppendWritesAll": bool, "immutabilityPeriodSinceCreationInDays": 0, - "objectIdentifier": "str", - "tenantId": "str", - "timestamp": "2020-02-20 00:00:00", - "update": "str", - "upn": "str", - } - ], - }, - "immutableStorageWithVersioning": { - "enabled": bool, - "migrationState": "str", - "timeStamp": "2020-02-20 00:00:00", - }, - "lastModifiedTime": "2020-02-20 00:00:00", - "leaseDuration": "str", - "leaseState": "str", - "leaseStatus": "str", - "legalHold": { - "hasLegalHold": bool, - "protectedAppendWritesHistory": { - "allowProtectedAppendWritesAll": bool, - "timestamp": "2020-02-20 00:00:00", + "state": "str", + }, + "updateHistory": [ + { + "allowProtectedAppendWrites": bool, + "allowProtectedAppendWritesAll": bool, + "immutabilityPeriodSinceCreationInDays": 0, + "objectIdentifier": "str", + "tenantId": "str", + "timestamp": "2020-02-20 00:00:00", + "update": "str", + "upn": "str", + } + ], }, - "tags": [ - { - "objectIdentifier": "str", - "tag": "str", - "tenantId": "str", + "immutableStorageWithVersioning": { + "enabled": bool, + "migrationState": "str", + "timeStamp": "2020-02-20 00:00:00", + }, + "lastModifiedTime": "2020-02-20 00:00:00", + "leaseDuration": "str", + "leaseState": "str", + "leaseStatus": "str", + "legalHold": { + "hasLegalHold": bool, + "protectedAppendWritesHistory": { + "allowProtectedAppendWritesAll": bool, "timestamp": "2020-02-20 00:00:00", - "upn": "str", - } - ], + }, + "tags": [ + { + "objectIdentifier": "str", + "tag": "str", + "tenantId": "str", + "timestamp": "2020-02-20 00:00:00", + "upn": "str", + } + ], + }, + "metadata": {"str": "str"}, + "publicAccess": "str", + "remainingRetentionDays": 0, + "version": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + 
"lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", }, - "metadata": {"str": "str"}, - "name": "str", - "publicAccess": "str", - "remainingRetentionDays": 0, "type": "str", - "version": "str", }, - api_version="2025-06-01", - ) - - # please add some check logic here by yourself - # ... - - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy_async - async def test_blob_containers_get(self, resource_group): - response = await self.client.blob_containers.get( - resource_group_name=resource_group.name, - account_name="str", - container_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -201,7 +210,6 @@ async def test_blob_containers_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", container_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -215,7 +223,6 @@ async def test_blob_containers_set_legal_hold(self, resource_group): account_name="str", container_name="str", legal_hold={"tags": ["str"], "allowProtectedAppendWritesAll": bool, "hasLegalHold": bool}, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -229,7 +236,6 @@ async def test_blob_containers_clear_legal_hold(self, resource_group): account_name="str", container_name="str", legal_hold={"tags": ["str"], "allowProtectedAppendWritesAll": bool, "hasLegalHold": bool}, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -237,13 +243,11 @@ async def test_blob_containers_clear_legal_hold(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_blob_containers_create_or_update_immutability_policy(self, resource_group): - response = await self.client.blob_containers.create_or_update_immutability_policy( + async def test_blob_containers_lease(self, resource_group): + response = await 
self.client.blob_containers.lease( resource_group_name=resource_group.name, account_name="str", container_name="str", - immutability_policy_name="default", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -251,28 +255,36 @@ async def test_blob_containers_create_or_update_immutability_policy(self, resour @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_blob_containers_get_immutability_policy(self, resource_group): - response = await self.client.blob_containers.get_immutability_policy( + async def test_blob_containers_begin_object_level_worm(self, resource_group): + response = await ( + await self.client.blob_containers.begin_object_level_worm( + resource_group_name=resource_group.name, + account_name="str", + container_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_blob_containers_list(self, resource_group): + response = self.client.blob_containers.list( resource_group_name=resource_group.name, account_name="str", - container_name="str", - immutability_policy_name="default", - api_version="2025-06-01", ) - + result = [r async for r in response] # please add some check logic here by yourself # ... 
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_blob_containers_delete_immutability_policy(self, resource_group): - response = await self.client.blob_containers.delete_immutability_policy( + async def test_blob_containers_get_immutability_policy(self, resource_group): + response = await self.client.blob_containers.get_immutability_policy( resource_group_name=resource_group.name, account_name="str", container_name="str", - if_match="str", - immutability_policy_name="default", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -280,13 +292,11 @@ async def test_blob_containers_delete_immutability_policy(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_blob_containers_lock_immutability_policy(self, resource_group): - response = await self.client.blob_containers.lock_immutability_policy( + async def test_blob_containers_create_or_update_immutability_policy(self, resource_group): + response = await self.client.blob_containers.create_or_update_immutability_policy( resource_group_name=resource_group.name, account_name="str", container_name="str", - if_match="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -294,13 +304,13 @@ async def test_blob_containers_lock_immutability_policy(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_blob_containers_extend_immutability_policy(self, resource_group): - response = await self.client.blob_containers.extend_immutability_policy( + async def test_blob_containers_delete_immutability_policy(self, resource_group): + response = await self.client.blob_containers.delete_immutability_policy( resource_group_name=resource_group.name, account_name="str", container_name="str", - if_match="str", - api_version="2025-06-01", + etag="str", + match_condition=~azure.core.MatchConditions, ) # 
please add some check logic here by yourself @@ -308,12 +318,13 @@ async def test_blob_containers_extend_immutability_policy(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_blob_containers_lease(self, resource_group): - response = await self.client.blob_containers.lease( + async def test_blob_containers_lock_immutability_policy(self, resource_group): + response = await self.client.blob_containers.lock_immutability_policy( resource_group_name=resource_group.name, account_name="str", container_name="str", - api_version="2025-06-01", + etag="str", + match_condition=~azure.core.MatchConditions, ) # please add some check logic here by yourself @@ -321,15 +332,14 @@ async def test_blob_containers_lease(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_blob_containers_begin_object_level_worm(self, resource_group): - response = await ( - await self.client.blob_containers.begin_object_level_worm( - resource_group_name=resource_group.name, - account_name="str", - container_name="str", - api_version="2025-06-01", - ) - ).result() # call '.result()' to poll until service return final result + async def test_blob_containers_extend_immutability_policy(self, resource_group): + response = await self.client.blob_containers.extend_immutability_policy( + resource_group_name=resource_group.name, + account_name="str", + container_name="str", + etag="str", + match_condition=~azure.core.MatchConditions, + ) # please add some check logic here by yourself # ... 
diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_inventory_policies_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_inventory_policies_operations.py index 2c54c1d992ec..126665317a8d 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_inventory_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_inventory_policies_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -25,7 +25,6 @@ def test_blob_inventory_policies_get(self, resource_group): resource_group_name=resource_group.name, account_name="str", blob_inventory_policy_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -40,34 +39,36 @@ def test_blob_inventory_policies_create_or_update(self, resource_group): blob_inventory_policy_name="str", properties={ "id": "str", - "lastModifiedTime": "2020-02-20 00:00:00", "name": "str", - "policy": { - "enabled": bool, - "rules": [ - { - "definition": { - "format": "str", - "objectType": "str", - "schedule": "str", - "schemaFields": ["str"], - "filters": { - "blobTypes": ["str"], - "creationTime": {"lastNDays": 0}, - "excludePrefix": ["str"], - "includeBlobVersions": bool, - "includeDeleted": bool, - "includeSnapshots": bool, - "prefixMatch": ["str"], + "properties": { + "policy": { + "enabled": bool, + "rules": [ + { + "definition": { + "format": "str", + "objectType": 
"str", + "schedule": "str", + "schemaFields": ["str"], + "filters": { + "blobTypes": ["str"], + "creationTime": {"lastNDays": 0}, + "excludePrefix": ["str"], + "includeBlobVersions": bool, + "includeDeleted": bool, + "includeSnapshots": bool, + "prefixMatch": ["str"], + }, }, - }, - "destination": "str", - "enabled": bool, - "name": "str", - } - ], - "type": "str", - "destination": "str", + "destination": "str", + "enabled": bool, + "name": "str", + } + ], + "type": "str", + "destination": "str", + }, + "lastModifiedTime": "2020-02-20 00:00:00", }, "systemData": { "createdAt": "2020-02-20 00:00:00", @@ -79,7 +80,6 @@ def test_blob_inventory_policies_create_or_update(self, resource_group): }, "type": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -92,7 +92,6 @@ def test_blob_inventory_policies_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", blob_inventory_policy_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -104,7 +103,6 @@ def test_blob_inventory_policies_list(self, resource_group): response = self.client.blob_inventory_policies.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_inventory_policies_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_inventory_policies_operations_async.py index d9843cea7a01..91fdb8f26195 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_inventory_policies_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_inventory_policies_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft 
Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -26,7 +26,6 @@ async def test_blob_inventory_policies_get(self, resource_group): resource_group_name=resource_group.name, account_name="str", blob_inventory_policy_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -41,34 +40,36 @@ async def test_blob_inventory_policies_create_or_update(self, resource_group): blob_inventory_policy_name="str", properties={ "id": "str", - "lastModifiedTime": "2020-02-20 00:00:00", "name": "str", - "policy": { - "enabled": bool, - "rules": [ - { - "definition": { - "format": "str", - "objectType": "str", - "schedule": "str", - "schemaFields": ["str"], - "filters": { - "blobTypes": ["str"], - "creationTime": {"lastNDays": 0}, - "excludePrefix": ["str"], - "includeBlobVersions": bool, - "includeDeleted": bool, - "includeSnapshots": bool, - "prefixMatch": ["str"], + "properties": { + "policy": { + "enabled": bool, + "rules": [ + { + "definition": { + "format": "str", + "objectType": "str", + "schedule": "str", + "schemaFields": ["str"], + "filters": { + "blobTypes": ["str"], + "creationTime": {"lastNDays": 0}, + "excludePrefix": ["str"], + "includeBlobVersions": bool, + "includeDeleted": bool, + "includeSnapshots": bool, + "prefixMatch": ["str"], + }, }, - }, - "destination": "str", - "enabled": bool, - "name": "str", - } - ], - "type": "str", - "destination": "str", + "destination": "str", + "enabled": bool, + "name": "str", + } + ], + "type": "str", + "destination": "str", + }, + "lastModifiedTime": "2020-02-20 00:00:00", }, "systemData": { "createdAt": "2020-02-20 00:00:00", @@ 
-80,7 +81,6 @@ async def test_blob_inventory_policies_create_or_update(self, resource_group): }, "type": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -93,7 +93,6 @@ async def test_blob_inventory_policies_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", blob_inventory_policy_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -105,7 +104,6 @@ async def test_blob_inventory_policies_list(self, resource_group): response = self.client.blob_inventory_policies.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_services_operations.py index 603218da7657..70d00bcf6a5f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_services_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -20,13 +20,12 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_services_list(self, resource_group): - response = self.client.blob_services.list( + def test_blob_services_get_service_properties(self, resource_group): + response = self.client.blob_services.get_service_properties( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) - result = [r for r in response] + # please add some check logic here by yourself # ... @@ -37,42 +36,56 @@ def test_blob_services_set_service_properties(self, resource_group): resource_group_name=resource_group.name, account_name="str", parameters={ - "automaticSnapshotPolicyEnabled": bool, - "changeFeed": {"enabled": bool, "retentionInDays": 0}, - "containerDeleteRetentionPolicy": {"allowPermanentDelete": bool, "days": 0, "enabled": bool}, - "cors": { - "corsRules": [ - { - "allowedHeaders": ["str"], - "allowedMethods": ["str"], - "allowedOrigins": ["str"], - "exposedHeaders": ["str"], - "maxAgeInSeconds": 0, - } - ] - }, - "defaultServiceVersion": "str", - "deleteRetentionPolicy": {"allowPermanentDelete": bool, "days": 0, "enabled": bool}, "id": "str", - "isVersioningEnabled": bool, - "lastAccessTimeTrackingPolicy": { - "enable": bool, - "blobType": ["str"], - "name": "str", - "trackingGranularityInDays": 0, - }, "name": "str", - "restorePolicy": { - "enabled": bool, - "days": 0, - "lastEnabledTime": "2020-02-20 00:00:00", - "minRestoreTime": "2020-02-20 00:00:00", + "properties": { + "automaticSnapshotPolicyEnabled": bool, + "changeFeed": {"enabled": bool, "retentionInDays": 0}, + "containerDeleteRetentionPolicy": {"allowPermanentDelete": bool, "days": 0, "enabled": bool}, + "cors": { + "corsRules": [ + { + "allowedHeaders": ["str"], + "allowedMethods": ["str"], + "allowedOrigins": ["str"], + "exposedHeaders": ["str"], + 
"maxAgeInSeconds": 0, + } + ] + }, + "defaultServiceVersion": "str", + "deleteRetentionPolicy": {"allowPermanentDelete": bool, "days": 0, "enabled": bool}, + "isVersioningEnabled": bool, + "lastAccessTimeTrackingPolicy": { + "enable": bool, + "blobType": ["str"], + "name": "str", + "trackingGranularityInDays": 0, + }, + "restorePolicy": { + "enabled": bool, + "days": 0, + "lastEnabledTime": "2020-02-20 00:00:00", + "minRestoreTime": "2020-02-20 00:00:00", + }, + "staticWebsite": { + "enabled": bool, + "defaultIndexDocumentPath": "str", + "errorDocument404Path": "str", + "indexDocument": "str", + }, }, "sku": {"name": "str", "tier": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", - blob_services_name="default", ) # please add some check logic here by yourself @@ -80,13 +93,11 @@ def test_blob_services_set_service_properties(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_blob_services_get_service_properties(self, resource_group): - response = self.client.blob_services.get_service_properties( + def test_blob_services_list(self, resource_group): + response = self.client.blob_services.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", - blob_services_name="default", ) - + result = [r for r in response] # please add some check logic here by yourself # ... 
diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_services_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_services_operations_async.py index 08a4f785130a..54a0856c2fcb 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_services_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_blob_services_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -21,13 +21,12 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_blob_services_list(self, resource_group): - response = self.client.blob_services.list( + async def test_blob_services_get_service_properties(self, resource_group): + response = await self.client.blob_services.get_service_properties( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) - result = [r async for r in response] + # please add some check logic here by yourself # ... 
@@ -38,42 +37,56 @@ async def test_blob_services_set_service_properties(self, resource_group): resource_group_name=resource_group.name, account_name="str", parameters={ - "automaticSnapshotPolicyEnabled": bool, - "changeFeed": {"enabled": bool, "retentionInDays": 0}, - "containerDeleteRetentionPolicy": {"allowPermanentDelete": bool, "days": 0, "enabled": bool}, - "cors": { - "corsRules": [ - { - "allowedHeaders": ["str"], - "allowedMethods": ["str"], - "allowedOrigins": ["str"], - "exposedHeaders": ["str"], - "maxAgeInSeconds": 0, - } - ] - }, - "defaultServiceVersion": "str", - "deleteRetentionPolicy": {"allowPermanentDelete": bool, "days": 0, "enabled": bool}, "id": "str", - "isVersioningEnabled": bool, - "lastAccessTimeTrackingPolicy": { - "enable": bool, - "blobType": ["str"], - "name": "str", - "trackingGranularityInDays": 0, - }, "name": "str", - "restorePolicy": { - "enabled": bool, - "days": 0, - "lastEnabledTime": "2020-02-20 00:00:00", - "minRestoreTime": "2020-02-20 00:00:00", + "properties": { + "automaticSnapshotPolicyEnabled": bool, + "changeFeed": {"enabled": bool, "retentionInDays": 0}, + "containerDeleteRetentionPolicy": {"allowPermanentDelete": bool, "days": 0, "enabled": bool}, + "cors": { + "corsRules": [ + { + "allowedHeaders": ["str"], + "allowedMethods": ["str"], + "allowedOrigins": ["str"], + "exposedHeaders": ["str"], + "maxAgeInSeconds": 0, + } + ] + }, + "defaultServiceVersion": "str", + "deleteRetentionPolicy": {"allowPermanentDelete": bool, "days": 0, "enabled": bool}, + "isVersioningEnabled": bool, + "lastAccessTimeTrackingPolicy": { + "enable": bool, + "blobType": ["str"], + "name": "str", + "trackingGranularityInDays": 0, + }, + "restorePolicy": { + "enabled": bool, + "days": 0, + "lastEnabledTime": "2020-02-20 00:00:00", + "minRestoreTime": "2020-02-20 00:00:00", + }, + "staticWebsite": { + "enabled": bool, + "defaultIndexDocumentPath": "str", + "errorDocument404Path": "str", + "indexDocument": "str", + }, }, "sku": {"name": "str", 
"tier": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", - blob_services_name="default", ) # please add some check logic here by yourself @@ -81,13 +94,11 @@ async def test_blob_services_set_service_properties(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_blob_services_get_service_properties(self, resource_group): - response = await self.client.blob_services.get_service_properties( + async def test_blob_services_list(self, resource_group): + response = self.client.blob_services.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", - blob_services_name="default", ) - + result = [r async for r in response] # please add some check logic here by yourself # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_connectors_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_connectors_operations.py new file mode 100644 index 000000000000..b9cf2f5bc829 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_connectors_operations.py @@ -0,0 +1,141 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.storage import StorageManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStorageManagementConnectorsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StorageManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_connectors_get(self, resource_group): + response = self.client.connectors.get( + resource_group_name=resource_group.name, + account_name="str", + connector_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_connectors_begin_create(self, resource_group): + response = self.client.connectors.begin_create( + resource_group_name=resource_group.name, + account_name="str", + connector_name="str", + resource={ + "location": "str", + "properties": { + "dataSourceType": "str", + "source": "storage_connector_source", + "creationTime": "str", + "description": "str", + "provisioningState": "str", + "state": "str", + "testConnection": bool, + "uniqueId": "str", + }, + "id": "str", + "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_connectors_begin_update(self, resource_group): + response = self.client.connectors.begin_update( + resource_group_name=resource_group.name, + account_name="str", + connector_name="str", + properties={ + "location": "str", + "properties": { + "dataSourceType": "str", + "source": "storage_connector_source", + "creationTime": "str", + "description": "str", + "provisioningState": "str", + "state": "str", + "testConnection": bool, + "uniqueId": "str", + }, + "id": "str", + "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_connectors_begin_delete(self, resource_group): + response = self.client.connectors.begin_delete( + resource_group_name=resource_group.name, + account_name="str", + connector_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_connectors_list_by_storage_account(self, resource_group): + response = self.client.connectors.list_by_storage_account( + resource_group_name=resource_group.name, + account_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_connectors_begin_test_existing_connection(self, resource_group): + response = self.client.connectors.begin_test_existing_connection( + resource_group_name=resource_group.name, + account_name="str", + connector_name="str", + body={"uniqueId": "str"}, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_connectors_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_connectors_operations_async.py new file mode 100644 index 000000000000..aacfe2b85e82 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_connectors_operations_async.py @@ -0,0 +1,150 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.storage.aio import StorageManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStorageManagementConnectorsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StorageManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_connectors_get(self, resource_group): + response = await self.client.connectors.get( + resource_group_name=resource_group.name, + account_name="str", + connector_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_connectors_begin_create(self, resource_group): + response = await ( + await self.client.connectors.begin_create( + resource_group_name=resource_group.name, + account_name="str", + connector_name="str", + resource={ + "location": "str", + "properties": { + "dataSourceType": "str", + "source": "storage_connector_source", + "creationTime": "str", + "description": "str", + "provisioningState": "str", + "state": "str", + "testConnection": bool, + "uniqueId": "str", + }, + "id": "str", + "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_connectors_begin_update(self, resource_group): + response = await ( + await self.client.connectors.begin_update( + resource_group_name=resource_group.name, + account_name="str", + connector_name="str", + properties={ + "location": "str", + "properties": { + "dataSourceType": "str", + "source": "storage_connector_source", + "creationTime": "str", + "description": "str", + "provisioningState": "str", + "state": "str", + "testConnection": bool, + "uniqueId": "str", + }, + "id": "str", + "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_connectors_begin_delete(self, resource_group): + response = await ( + await self.client.connectors.begin_delete( + resource_group_name=resource_group.name, + account_name="str", + connector_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_connectors_list_by_storage_account(self, resource_group): + response = self.client.connectors.list_by_storage_account( + resource_group_name=resource_group.name, + account_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_connectors_begin_test_existing_connection(self, resource_group): + response = await ( + await self.client.connectors.begin_test_existing_connection( + resource_group_name=resource_group.name, + account_name="str", + connector_name="str", + body={"uniqueId": "str"}, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_data_shares_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_data_shares_operations.py new file mode 100644 index 000000000000..5be4a5ee5d02 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_data_shares_operations.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.storage import StorageManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStorageManagementDataSharesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StorageManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_data_shares_get(self, resource_group): + response = self.client.data_shares.get( + resource_group_name=resource_group.name, + account_name="str", + data_share_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_data_shares_begin_create(self, resource_group): + response = self.client.data_shares.begin_create( + resource_group_name=resource_group.name, + account_name="str", + data_share_name="str", + resource={ + "location": "str", + "properties": { + "accessPolicies": [{"permission": "str", "principalId": "str", "tenantId": "str"}], + "assets": [{"assetPath": "str", "displayName": "str"}], + "dataShareIdentifier": "str", + "dataShareUri": "str", + "description": "str", + "provisioningState": "str", + }, + "id": "str", + "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_data_shares_begin_update(self, resource_group): + response = self.client.data_shares.begin_update( + resource_group_name=resource_group.name, + account_name="str", + data_share_name="str", + properties={ + "location": "str", + "properties": { + "accessPolicies": [{"permission": "str", "principalId": "str", "tenantId": "str"}], + "assets": [{"assetPath": "str", "displayName": "str"}], + "dataShareIdentifier": "str", + "dataShareUri": "str", + "description": "str", + "provisioningState": "str", + }, + "id": "str", + "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_data_shares_begin_delete(self, resource_group): + response = self.client.data_shares.begin_delete( + resource_group_name=resource_group.name, + account_name="str", + data_share_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_data_shares_list_by_storage_account(self, resource_group): + response = self.client.data_shares.list_by_storage_account( + resource_group_name=resource_group.name, + account_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_data_shares_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_data_shares_operations_async.py new file mode 100644 index 000000000000..854d3349cef3 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_data_shares_operations_async.py @@ -0,0 +1,131 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.storage.aio import StorageManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStorageManagementDataSharesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StorageManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_data_shares_get(self, resource_group): + response = await self.client.data_shares.get( + resource_group_name=resource_group.name, + account_name="str", + data_share_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_data_shares_begin_create(self, resource_group): + response = await ( + await self.client.data_shares.begin_create( + resource_group_name=resource_group.name, + account_name="str", + data_share_name="str", + resource={ + "location": "str", + "properties": { + "accessPolicies": [{"permission": "str", "principalId": "str", "tenantId": "str"}], + "assets": [{"assetPath": "str", "displayName": "str"}], + "dataShareIdentifier": "str", + "dataShareUri": "str", + "description": "str", + "provisioningState": "str", + }, + "id": "str", + "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_data_shares_begin_update(self, resource_group): + response = await ( + await self.client.data_shares.begin_update( + resource_group_name=resource_group.name, + account_name="str", + data_share_name="str", + properties={ + "location": "str", + "properties": { + "accessPolicies": [{"permission": "str", "principalId": "str", "tenantId": "str"}], + "assets": [{"assetPath": "str", "displayName": "str"}], + "dataShareIdentifier": "str", + "dataShareUri": "str", + "description": "str", + "provisioningState": "str", + }, + "id": "str", + "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_data_shares_begin_delete(self, resource_group): + response = await ( + await self.client.data_shares.begin_delete( + resource_group_name=resource_group.name, + account_name="str", + data_share_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_data_shares_list_by_storage_account(self, resource_group): + response = self.client.data_shares.list_by_storage_account( + resource_group_name=resource_group.name, + account_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_deleted_accounts_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_deleted_accounts_operations.py index 8eb7a9d2b7ee..139d6b189783 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_deleted_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_deleted_accounts_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -18,24 +18,21 @@ class TestStorageManagementDeletedAccountsOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): self.client = self.create_mgmt_client(StorageManagementClient) - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy - def test_deleted_accounts_list(self, resource_group): - response = self.client.deleted_accounts.list( - api_version="2025-06-01", - ) - result = [r for r in response] - # please add some check logic here by yourself - # ... - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_deleted_accounts_get(self, resource_group): response = self.client.deleted_accounts.get( deleted_account_name="str", location="str", - api_version="2025-06-01", ) # please add some check logic here by yourself # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_deleted_accounts_list(self, resource_group): + response = self.client.deleted_accounts.list() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_deleted_accounts_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_deleted_accounts_operations_async.py index 0882e26faf44..2c108d5426da 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_deleted_accounts_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_deleted_accounts_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -19,24 +19,21 @@ class TestStorageManagementDeletedAccountsOperationsAsync(AzureMgmtRecordedTestC def setup_method(self, method): self.client = self.create_mgmt_client(StorageManagementClient, is_async=True) - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy_async - async def test_deleted_accounts_list(self, resource_group): - response = self.client.deleted_accounts.list( - api_version="2025-06-01", - ) - result = [r async for r in response] - # please add some check logic here by yourself - # ... 
- @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async async def test_deleted_accounts_get(self, resource_group): response = await self.client.deleted_accounts.get( deleted_account_name="str", location="str", - api_version="2025-06-01", ) # please add some check logic here by yourself # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_deleted_accounts_list(self, resource_group): + response = self.client.deleted_accounts.list() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_encryption_scopes_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_encryption_scopes_operations.py index 39a0c1bf6a63..0fac5e180e7e 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_encryption_scopes_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_encryption_scopes_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -20,27 +20,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_encryption_scopes_put(self, resource_group): - response = self.client.encryption_scopes.put( + def test_encryption_scopes_get(self, resource_group): + response = self.client.encryption_scopes.get( resource_group_name=resource_group.name, account_name="str", encryption_scope_name="str", - encryption_scope={ - "creationTime": "2020-02-20 00:00:00", - "id": "str", - "keyVaultProperties": { - "currentVersionedKeyIdentifier": "str", - "keyUri": "str", - "lastKeyRotationTimestamp": "2020-02-20 00:00:00", - }, - "lastModifiedTime": "2020-02-20 00:00:00", - "name": "str", - "requireInfrastructureEncryption": bool, - "source": "str", - "state": "str", - "type": "str", - }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -48,27 +32,36 @@ def test_encryption_scopes_put(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_encryption_scopes_patch(self, resource_group): - response = self.client.encryption_scopes.patch( + def test_encryption_scopes_put(self, resource_group): + response = self.client.encryption_scopes.put( resource_group_name=resource_group.name, account_name="str", encryption_scope_name="str", encryption_scope={ - "creationTime": "2020-02-20 00:00:00", "id": "str", - "keyVaultProperties": { - "currentVersionedKeyIdentifier": "str", - "keyUri": "str", - "lastKeyRotationTimestamp": "2020-02-20 00:00:00", - }, - "lastModifiedTime": "2020-02-20 00:00:00", "name": "str", - "requireInfrastructureEncryption": bool, - "source": "str", - "state": "str", + "properties": { + "creationTime": "2020-02-20 00:00:00", + "keyVaultProperties": { + "currentVersionedKeyIdentifier": "str", + "keyUri": "str", + "lastKeyRotationTimestamp": "2020-02-20 00:00:00", + }, + 
"lastModifiedTime": "2020-02-20 00:00:00", + "requireInfrastructureEncryption": bool, + "source": "str", + "state": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -76,12 +69,36 @@ def test_encryption_scopes_patch(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_encryption_scopes_get(self, resource_group): - response = self.client.encryption_scopes.get( + def test_encryption_scopes_patch(self, resource_group): + response = self.client.encryption_scopes.patch( resource_group_name=resource_group.name, account_name="str", encryption_scope_name="str", - api_version="2025-06-01", + encryption_scope={ + "id": "str", + "name": "str", + "properties": { + "creationTime": "2020-02-20 00:00:00", + "keyVaultProperties": { + "currentVersionedKeyIdentifier": "str", + "keyUri": "str", + "lastKeyRotationTimestamp": "2020-02-20 00:00:00", + }, + "lastModifiedTime": "2020-02-20 00:00:00", + "requireInfrastructureEncryption": bool, + "source": "str", + "state": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, ) # please add some check logic here by yourself @@ -93,7 +110,6 @@ def test_encryption_scopes_list(self, resource_group): response = self.client.encryption_scopes.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_encryption_scopes_operations_async.py 
b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_encryption_scopes_operations_async.py index d60acb3bf62d..d69480b114fb 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_encryption_scopes_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_encryption_scopes_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -21,27 +21,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_encryption_scopes_put(self, resource_group): - response = await self.client.encryption_scopes.put( + async def test_encryption_scopes_get(self, resource_group): + response = await self.client.encryption_scopes.get( resource_group_name=resource_group.name, account_name="str", encryption_scope_name="str", - encryption_scope={ - "creationTime": "2020-02-20 00:00:00", - "id": "str", - "keyVaultProperties": { - "currentVersionedKeyIdentifier": "str", - "keyUri": "str", - "lastKeyRotationTimestamp": "2020-02-20 00:00:00", - }, - "lastModifiedTime": "2020-02-20 00:00:00", - "name": "str", - "requireInfrastructureEncryption": bool, - "source": "str", - "state": "str", - "type": "str", - }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -49,27 +33,36 @@ async def test_encryption_scopes_put(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) 
@recorded_by_proxy_async - async def test_encryption_scopes_patch(self, resource_group): - response = await self.client.encryption_scopes.patch( + async def test_encryption_scopes_put(self, resource_group): + response = await self.client.encryption_scopes.put( resource_group_name=resource_group.name, account_name="str", encryption_scope_name="str", encryption_scope={ - "creationTime": "2020-02-20 00:00:00", "id": "str", - "keyVaultProperties": { - "currentVersionedKeyIdentifier": "str", - "keyUri": "str", - "lastKeyRotationTimestamp": "2020-02-20 00:00:00", - }, - "lastModifiedTime": "2020-02-20 00:00:00", "name": "str", - "requireInfrastructureEncryption": bool, - "source": "str", - "state": "str", + "properties": { + "creationTime": "2020-02-20 00:00:00", + "keyVaultProperties": { + "currentVersionedKeyIdentifier": "str", + "keyUri": "str", + "lastKeyRotationTimestamp": "2020-02-20 00:00:00", + }, + "lastModifiedTime": "2020-02-20 00:00:00", + "requireInfrastructureEncryption": bool, + "source": "str", + "state": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -77,12 +70,36 @@ async def test_encryption_scopes_patch(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_encryption_scopes_get(self, resource_group): - response = await self.client.encryption_scopes.get( + async def test_encryption_scopes_patch(self, resource_group): + response = await self.client.encryption_scopes.patch( resource_group_name=resource_group.name, account_name="str", encryption_scope_name="str", - api_version="2025-06-01", + encryption_scope={ + "id": "str", + "name": "str", + "properties": { + "creationTime": "2020-02-20 00:00:00", + 
"keyVaultProperties": { + "currentVersionedKeyIdentifier": "str", + "keyUri": "str", + "lastKeyRotationTimestamp": "2020-02-20 00:00:00", + }, + "lastModifiedTime": "2020-02-20 00:00:00", + "requireInfrastructureEncryption": bool, + "source": "str", + "state": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, ) # please add some check logic here by yourself @@ -94,7 +111,6 @@ async def test_encryption_scopes_list(self, resource_group): response = self.client.encryption_scopes.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_services_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_services_operations.py index 09a12cf05028..fe23d4d5bce6 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_services_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -20,11 +20,10 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_file_services_list(self, resource_group): - response = self.client.file_services.list( + def test_file_services_get_service_properties(self, resource_group): + response = self.client.file_services.get_service_properties( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -37,36 +36,44 @@ def test_file_services_set_service_properties(self, resource_group): resource_group_name=resource_group.name, account_name="str", parameters={ - "cors": { - "corsRules": [ - { - "allowedHeaders": ["str"], - "allowedMethods": ["str"], - "allowedOrigins": ["str"], - "exposedHeaders": ["str"], - "maxAgeInSeconds": 0, - } - ] - }, "id": "str", "name": "str", - "protocolSettings": { - "nfs": {"encryptionInTransit": {"required": bool}}, - "smb": { - "authenticationMethods": "str", - "channelEncryption": "str", - "encryptionInTransit": {"required": bool}, - "kerberosTicketEncryption": "str", - "multichannel": {"enabled": bool}, - "versions": "str", + "properties": { + "cors": { + "corsRules": [ + { + "allowedHeaders": ["str"], + "allowedMethods": ["str"], + "allowedOrigins": ["str"], + "exposedHeaders": ["str"], + "maxAgeInSeconds": 0, + } + ] + }, + "protocolSettings": { + "nfs": {"encryptionInTransit": {"required": bool}}, + "smb": { + "authenticationMethods": "str", + "channelEncryption": "str", + "encryptionInTransit": {"required": bool}, + "kerberosTicketEncryption": "str", + "multichannel": {"enabled": bool}, + "versions": "str", + }, }, + "shareDeleteRetentionPolicy": {"allowPermanentDelete": bool, "days": 0, "enabled": bool}, }, - "shareDeleteRetentionPolicy": {"allowPermanentDelete": bool, "days": 0, "enabled": bool}, "sku": {"name": "str", "tier": "str"}, + 
"systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", - file_services_name="default", ) # please add some check logic here by yourself @@ -74,12 +81,10 @@ def test_file_services_set_service_properties(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_file_services_get_service_properties(self, resource_group): - response = self.client.file_services.get_service_properties( + def test_file_services_list(self, resource_group): + response = self.client.file_services.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", - file_services_name="default", ) # please add some check logic here by yourself @@ -87,27 +92,22 @@ def test_file_services_get_service_properties(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_file_services_list_service_usages(self, resource_group): - response = self.client.file_services.list_service_usages( + def test_file_services_get_service_usage(self, resource_group): + response = self.client.file_services.get_service_usage( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", - file_services_name="default", ) - result = [r for r in response] + # please add some check logic here by yourself # ... 
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_file_services_get_service_usage(self, resource_group): - response = self.client.file_services.get_service_usage( + def test_file_services_list_service_usages(self, resource_group): + response = self.client.file_services.list_service_usages( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", - file_services_name="default", - file_service_usages_name="default", ) - + result = [r for r in response] # please add some check logic here by yourself # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_services_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_services_operations_async.py index 0cad87bc0cfd..c11f5ded1660 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_services_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_services_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -21,11 +21,10 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_file_services_list(self, resource_group): - response = await self.client.file_services.list( + async def test_file_services_get_service_properties(self, resource_group): + response = await self.client.file_services.get_service_properties( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -38,36 +37,44 @@ async def test_file_services_set_service_properties(self, resource_group): resource_group_name=resource_group.name, account_name="str", parameters={ - "cors": { - "corsRules": [ - { - "allowedHeaders": ["str"], - "allowedMethods": ["str"], - "allowedOrigins": ["str"], - "exposedHeaders": ["str"], - "maxAgeInSeconds": 0, - } - ] - }, "id": "str", "name": "str", - "protocolSettings": { - "nfs": {"encryptionInTransit": {"required": bool}}, - "smb": { - "authenticationMethods": "str", - "channelEncryption": "str", - "encryptionInTransit": {"required": bool}, - "kerberosTicketEncryption": "str", - "multichannel": {"enabled": bool}, - "versions": "str", + "properties": { + "cors": { + "corsRules": [ + { + "allowedHeaders": ["str"], + "allowedMethods": ["str"], + "allowedOrigins": ["str"], + "exposedHeaders": ["str"], + "maxAgeInSeconds": 0, + } + ] + }, + "protocolSettings": { + "nfs": {"encryptionInTransit": {"required": bool}}, + "smb": { + "authenticationMethods": "str", + "channelEncryption": "str", + "encryptionInTransit": {"required": bool}, + "kerberosTicketEncryption": "str", + "multichannel": {"enabled": bool}, + "versions": "str", + }, }, + "shareDeleteRetentionPolicy": {"allowPermanentDelete": bool, "days": 0, "enabled": bool}, }, - "shareDeleteRetentionPolicy": {"allowPermanentDelete": bool, "days": 0, "enabled": bool}, "sku": 
{"name": "str", "tier": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", - file_services_name="default", ) # please add some check logic here by yourself @@ -75,12 +82,10 @@ async def test_file_services_set_service_properties(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_file_services_get_service_properties(self, resource_group): - response = await self.client.file_services.get_service_properties( + async def test_file_services_list(self, resource_group): + response = await self.client.file_services.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", - file_services_name="default", ) # please add some check logic here by yourself @@ -88,27 +93,22 @@ async def test_file_services_get_service_properties(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_file_services_list_service_usages(self, resource_group): - response = self.client.file_services.list_service_usages( + async def test_file_services_get_service_usage(self, resource_group): + response = await self.client.file_services.get_service_usage( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", - file_services_name="default", ) - result = [r async for r in response] + # please add some check logic here by yourself # ... 
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_file_services_get_service_usage(self, resource_group): - response = await self.client.file_services.get_service_usage( + async def test_file_services_list_service_usages(self, resource_group): + response = self.client.file_services.list_service_usages( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", - file_services_name="default", - file_service_usages_name="default", ) - + result = [r async for r in response] # please add some check logic here by yourself # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_shares_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_shares_operations.py index eed19d8b71eb..24a95635d24a 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_shares_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_shares_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -20,13 +20,13 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_file_shares_list(self, resource_group): - response = self.client.file_shares.list( + def test_file_shares_get(self, resource_group): + response = self.client.file_shares.get( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", + share_name="str", ) - result = [r for r in response] + # please add some check logic here by yourself # ... @@ -38,51 +38,60 @@ def test_file_shares_create(self, resource_group): account_name="str", share_name="str", file_share={ - "accessTier": "str", - "accessTierChangeTime": "2020-02-20 00:00:00", - "accessTierStatus": "str", - "deleted": bool, - "deletedTime": "2020-02-20 00:00:00", - "enabledProtocols": "str", "etag": "str", - "fileSharePaidBursting": { - "paidBurstingEnabled": bool, - "paidBurstingMaxBandwidthMibps": 0, - "paidBurstingMaxIops": 0, - }, "id": "str", - "includedBurstIops": 0, - "lastModifiedTime": "2020-02-20 00:00:00", - "leaseDuration": "str", - "leaseState": "str", - "leaseStatus": "str", - "maxBurstCreditsForIops": 0, - "metadata": {"str": "str"}, "name": "str", - "nextAllowedProvisionedBandwidthDowngradeTime": "2020-02-20 00:00:00", - "nextAllowedProvisionedIopsDowngradeTime": "2020-02-20 00:00:00", - "nextAllowedQuotaDowngradeTime": "2020-02-20 00:00:00", - "provisionedBandwidthMibps": 0, - "provisionedIops": 0, - "remainingRetentionDays": 0, - "rootSquash": "str", - "shareQuota": 0, - "shareUsageBytes": 0, - "signedIdentifiers": [ - { - "accessPolicy": { - "expiryTime": "2020-02-20 00:00:00", - "permission": "str", - "startTime": "2020-02-20 00:00:00", - }, - "id": "str", - } - ], - "snapshotTime": "2020-02-20 00:00:00", + "properties": { + "accessTier": "str", + "accessTierChangeTime": "2020-02-20 00:00:00", + "accessTierStatus": "str", + "deleted": 
bool, + "deletedTime": "2020-02-20 00:00:00", + "enabledProtocols": "str", + "fileSharePaidBursting": { + "paidBurstingEnabled": bool, + "paidBurstingMaxBandwidthMibps": 0, + "paidBurstingMaxIops": 0, + }, + "includedBurstIops": 0, + "lastModifiedTime": "2020-02-20 00:00:00", + "leaseDuration": "str", + "leaseState": "str", + "leaseStatus": "str", + "maxBurstCreditsForIops": 0, + "metadata": {"str": "str"}, + "nextAllowedProvisionedBandwidthDowngradeTime": "2020-02-20 00:00:00", + "nextAllowedProvisionedIopsDowngradeTime": "2020-02-20 00:00:00", + "nextAllowedQuotaDowngradeTime": "2020-02-20 00:00:00", + "provisionedBandwidthMibps": 0, + "provisionedIops": 0, + "remainingRetentionDays": 0, + "rootSquash": "str", + "shareQuota": 0, + "shareUsageBytes": 0, + "signedIdentifiers": [ + { + "accessPolicy": { + "expiryTime": "2020-02-20 00:00:00", + "permission": "str", + "startTime": "2020-02-20 00:00:00", + }, + "id": "str", + } + ], + "snapshotTime": "2020-02-20 00:00:00", + "version": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", - "version": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -96,51 +105,60 @@ def test_file_shares_update(self, resource_group): account_name="str", share_name="str", file_share={ - "accessTier": "str", - "accessTierChangeTime": "2020-02-20 00:00:00", - "accessTierStatus": "str", - "deleted": bool, - "deletedTime": "2020-02-20 00:00:00", - "enabledProtocols": "str", "etag": "str", - "fileSharePaidBursting": { - "paidBurstingEnabled": bool, - "paidBurstingMaxBandwidthMibps": 0, - "paidBurstingMaxIops": 0, - }, "id": "str", - "includedBurstIops": 0, - "lastModifiedTime": "2020-02-20 00:00:00", - "leaseDuration": "str", - "leaseState": "str", - "leaseStatus": "str", - "maxBurstCreditsForIops": 0, - "metadata": 
{"str": "str"}, "name": "str", - "nextAllowedProvisionedBandwidthDowngradeTime": "2020-02-20 00:00:00", - "nextAllowedProvisionedIopsDowngradeTime": "2020-02-20 00:00:00", - "nextAllowedQuotaDowngradeTime": "2020-02-20 00:00:00", - "provisionedBandwidthMibps": 0, - "provisionedIops": 0, - "remainingRetentionDays": 0, - "rootSquash": "str", - "shareQuota": 0, - "shareUsageBytes": 0, - "signedIdentifiers": [ - { - "accessPolicy": { - "expiryTime": "2020-02-20 00:00:00", - "permission": "str", - "startTime": "2020-02-20 00:00:00", - }, - "id": "str", - } - ], - "snapshotTime": "2020-02-20 00:00:00", + "properties": { + "accessTier": "str", + "accessTierChangeTime": "2020-02-20 00:00:00", + "accessTierStatus": "str", + "deleted": bool, + "deletedTime": "2020-02-20 00:00:00", + "enabledProtocols": "str", + "fileSharePaidBursting": { + "paidBurstingEnabled": bool, + "paidBurstingMaxBandwidthMibps": 0, + "paidBurstingMaxIops": 0, + }, + "includedBurstIops": 0, + "lastModifiedTime": "2020-02-20 00:00:00", + "leaseDuration": "str", + "leaseState": "str", + "leaseStatus": "str", + "maxBurstCreditsForIops": 0, + "metadata": {"str": "str"}, + "nextAllowedProvisionedBandwidthDowngradeTime": "2020-02-20 00:00:00", + "nextAllowedProvisionedIopsDowngradeTime": "2020-02-20 00:00:00", + "nextAllowedQuotaDowngradeTime": "2020-02-20 00:00:00", + "provisionedBandwidthMibps": 0, + "provisionedIops": 0, + "remainingRetentionDays": 0, + "rootSquash": "str", + "shareQuota": 0, + "shareUsageBytes": 0, + "signedIdentifiers": [ + { + "accessPolicy": { + "expiryTime": "2020-02-20 00:00:00", + "permission": "str", + "startTime": "2020-02-20 00:00:00", + }, + "id": "str", + } + ], + "snapshotTime": "2020-02-20 00:00:00", + "version": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", - "version": "str", }, - 
api_version="2025-06-01", ) # please add some check logic here by yourself @@ -148,12 +166,11 @@ def test_file_shares_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_file_shares_get(self, resource_group): - response = self.client.file_shares.get( + def test_file_shares_delete(self, resource_group): + response = self.client.file_shares.delete( resource_group_name=resource_group.name, account_name="str", share_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -161,12 +178,12 @@ def test_file_shares_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_file_shares_delete(self, resource_group): - response = self.client.file_shares.delete( + def test_file_shares_restore(self, resource_group): + response = self.client.file_shares.restore( resource_group_name=resource_group.name, account_name="str", share_name="str", - api_version="2025-06-01", + deleted_share={"deletedShareName": "str", "deletedShareVersion": "str"}, ) # please add some check logic here by yourself @@ -174,13 +191,11 @@ def test_file_shares_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_file_shares_restore(self, resource_group): - response = self.client.file_shares.restore( + def test_file_shares_lease(self, resource_group): + response = self.client.file_shares.lease( resource_group_name=resource_group.name, account_name="str", share_name="str", - deleted_share={"deletedShareName": "str", "deletedShareVersion": "str"}, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -188,13 +203,11 @@ def test_file_shares_restore(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_file_shares_lease(self, resource_group): - response = self.client.file_shares.lease( + def 
test_file_shares_list(self, resource_group): + response = self.client.file_shares.list( resource_group_name=resource_group.name, account_name="str", - share_name="str", - api_version="2025-06-01", ) - + result = [r for r in response] # please add some check logic here by yourself # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_shares_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_shares_operations_async.py index 9a8c9be84acc..d253b6cef29e 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_shares_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_file_shares_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -21,13 +21,13 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_file_shares_list(self, resource_group): - response = self.client.file_shares.list( + async def test_file_shares_get(self, resource_group): + response = await self.client.file_shares.get( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", + share_name="str", ) - result = [r async for r in response] + # please add some check logic here by yourself # ... 
@@ -39,51 +39,60 @@ async def test_file_shares_create(self, resource_group): account_name="str", share_name="str", file_share={ - "accessTier": "str", - "accessTierChangeTime": "2020-02-20 00:00:00", - "accessTierStatus": "str", - "deleted": bool, - "deletedTime": "2020-02-20 00:00:00", - "enabledProtocols": "str", "etag": "str", - "fileSharePaidBursting": { - "paidBurstingEnabled": bool, - "paidBurstingMaxBandwidthMibps": 0, - "paidBurstingMaxIops": 0, - }, "id": "str", - "includedBurstIops": 0, - "lastModifiedTime": "2020-02-20 00:00:00", - "leaseDuration": "str", - "leaseState": "str", - "leaseStatus": "str", - "maxBurstCreditsForIops": 0, - "metadata": {"str": "str"}, "name": "str", - "nextAllowedProvisionedBandwidthDowngradeTime": "2020-02-20 00:00:00", - "nextAllowedProvisionedIopsDowngradeTime": "2020-02-20 00:00:00", - "nextAllowedQuotaDowngradeTime": "2020-02-20 00:00:00", - "provisionedBandwidthMibps": 0, - "provisionedIops": 0, - "remainingRetentionDays": 0, - "rootSquash": "str", - "shareQuota": 0, - "shareUsageBytes": 0, - "signedIdentifiers": [ - { - "accessPolicy": { - "expiryTime": "2020-02-20 00:00:00", - "permission": "str", - "startTime": "2020-02-20 00:00:00", - }, - "id": "str", - } - ], - "snapshotTime": "2020-02-20 00:00:00", + "properties": { + "accessTier": "str", + "accessTierChangeTime": "2020-02-20 00:00:00", + "accessTierStatus": "str", + "deleted": bool, + "deletedTime": "2020-02-20 00:00:00", + "enabledProtocols": "str", + "fileSharePaidBursting": { + "paidBurstingEnabled": bool, + "paidBurstingMaxBandwidthMibps": 0, + "paidBurstingMaxIops": 0, + }, + "includedBurstIops": 0, + "lastModifiedTime": "2020-02-20 00:00:00", + "leaseDuration": "str", + "leaseState": "str", + "leaseStatus": "str", + "maxBurstCreditsForIops": 0, + "metadata": {"str": "str"}, + "nextAllowedProvisionedBandwidthDowngradeTime": "2020-02-20 00:00:00", + "nextAllowedProvisionedIopsDowngradeTime": "2020-02-20 00:00:00", + "nextAllowedQuotaDowngradeTime": "2020-02-20 
00:00:00", + "provisionedBandwidthMibps": 0, + "provisionedIops": 0, + "remainingRetentionDays": 0, + "rootSquash": "str", + "shareQuota": 0, + "shareUsageBytes": 0, + "signedIdentifiers": [ + { + "accessPolicy": { + "expiryTime": "2020-02-20 00:00:00", + "permission": "str", + "startTime": "2020-02-20 00:00:00", + }, + "id": "str", + } + ], + "snapshotTime": "2020-02-20 00:00:00", + "version": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", - "version": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -97,51 +106,60 @@ async def test_file_shares_update(self, resource_group): account_name="str", share_name="str", file_share={ - "accessTier": "str", - "accessTierChangeTime": "2020-02-20 00:00:00", - "accessTierStatus": "str", - "deleted": bool, - "deletedTime": "2020-02-20 00:00:00", - "enabledProtocols": "str", "etag": "str", - "fileSharePaidBursting": { - "paidBurstingEnabled": bool, - "paidBurstingMaxBandwidthMibps": 0, - "paidBurstingMaxIops": 0, - }, "id": "str", - "includedBurstIops": 0, - "lastModifiedTime": "2020-02-20 00:00:00", - "leaseDuration": "str", - "leaseState": "str", - "leaseStatus": "str", - "maxBurstCreditsForIops": 0, - "metadata": {"str": "str"}, "name": "str", - "nextAllowedProvisionedBandwidthDowngradeTime": "2020-02-20 00:00:00", - "nextAllowedProvisionedIopsDowngradeTime": "2020-02-20 00:00:00", - "nextAllowedQuotaDowngradeTime": "2020-02-20 00:00:00", - "provisionedBandwidthMibps": 0, - "provisionedIops": 0, - "remainingRetentionDays": 0, - "rootSquash": "str", - "shareQuota": 0, - "shareUsageBytes": 0, - "signedIdentifiers": [ - { - "accessPolicy": { - "expiryTime": "2020-02-20 00:00:00", - "permission": "str", - "startTime": "2020-02-20 00:00:00", - }, - "id": "str", - } - ], - "snapshotTime": "2020-02-20 
00:00:00", + "properties": { + "accessTier": "str", + "accessTierChangeTime": "2020-02-20 00:00:00", + "accessTierStatus": "str", + "deleted": bool, + "deletedTime": "2020-02-20 00:00:00", + "enabledProtocols": "str", + "fileSharePaidBursting": { + "paidBurstingEnabled": bool, + "paidBurstingMaxBandwidthMibps": 0, + "paidBurstingMaxIops": 0, + }, + "includedBurstIops": 0, + "lastModifiedTime": "2020-02-20 00:00:00", + "leaseDuration": "str", + "leaseState": "str", + "leaseStatus": "str", + "maxBurstCreditsForIops": 0, + "metadata": {"str": "str"}, + "nextAllowedProvisionedBandwidthDowngradeTime": "2020-02-20 00:00:00", + "nextAllowedProvisionedIopsDowngradeTime": "2020-02-20 00:00:00", + "nextAllowedQuotaDowngradeTime": "2020-02-20 00:00:00", + "provisionedBandwidthMibps": 0, + "provisionedIops": 0, + "remainingRetentionDays": 0, + "rootSquash": "str", + "shareQuota": 0, + "shareUsageBytes": 0, + "signedIdentifiers": [ + { + "accessPolicy": { + "expiryTime": "2020-02-20 00:00:00", + "permission": "str", + "startTime": "2020-02-20 00:00:00", + }, + "id": "str", + } + ], + "snapshotTime": "2020-02-20 00:00:00", + "version": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", - "version": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -149,12 +167,11 @@ async def test_file_shares_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_file_shares_get(self, resource_group): - response = await self.client.file_shares.get( + async def test_file_shares_delete(self, resource_group): + response = await self.client.file_shares.delete( resource_group_name=resource_group.name, account_name="str", share_name="str", - api_version="2025-06-01", ) # please add some check logic here by 
yourself @@ -162,12 +179,12 @@ async def test_file_shares_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_file_shares_delete(self, resource_group): - response = await self.client.file_shares.delete( + async def test_file_shares_restore(self, resource_group): + response = await self.client.file_shares.restore( resource_group_name=resource_group.name, account_name="str", share_name="str", - api_version="2025-06-01", + deleted_share={"deletedShareName": "str", "deletedShareVersion": "str"}, ) # please add some check logic here by yourself @@ -175,13 +192,11 @@ async def test_file_shares_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_file_shares_restore(self, resource_group): - response = await self.client.file_shares.restore( + async def test_file_shares_lease(self, resource_group): + response = await self.client.file_shares.lease( resource_group_name=resource_group.name, account_name="str", share_name="str", - deleted_share={"deletedShareName": "str", "deletedShareVersion": "str"}, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -189,13 +204,11 @@ async def test_file_shares_restore(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_file_shares_lease(self, resource_group): - response = await self.client.file_shares.lease( + async def test_file_shares_list(self, resource_group): + response = self.client.file_shares.list( resource_group_name=resource_group.name, account_name="str", - share_name="str", - api_version="2025-06-01", ) - + result = [r async for r in response] # please add some check logic here by yourself # ... 
diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_local_users_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_local_users_operations.py index 75e833b49edd..9ce3ca023985 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_local_users_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_local_users_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -18,18 +18,6 @@ class TestStorageManagementLocalUsersOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): self.client = self.create_mgmt_client(StorageManagementClient) - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy - def test_local_users_list(self, resource_group): - response = self.client.local_users.list( - resource_group_name=resource_group.name, - account_name="str", - api_version="2025-06-01", - ) - result = [r for r in response] - # please add some check logic here by yourself - # ... 
- @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_local_users_get(self, resource_group): @@ -37,7 +25,6 @@ def test_local_users_get(self, resource_group): resource_group_name=resource_group.name, account_name="str", username="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -51,19 +38,22 @@ def test_local_users_create_or_update(self, resource_group): account_name="str", username="str", properties={ - "allowAclAuthorization": bool, - "extendedGroups": [0], - "groupId": 0, - "hasSharedKey": bool, - "hasSshKey": bool, - "hasSshPassword": bool, - "homeDirectory": "str", "id": "str", - "isNFSv3Enabled": bool, "name": "str", - "permissionScopes": [{"permissions": "str", "resourceName": "str", "service": "str"}], - "sid": "str", - "sshAuthorizedKeys": [{"description": "str", "key": "str"}], + "properties": { + "allowAclAuthorization": bool, + "extendedGroups": [0], + "groupId": 0, + "hasSharedKey": bool, + "hasSshKey": bool, + "hasSshPassword": bool, + "homeDirectory": "str", + "isNFSv3Enabled": bool, + "permissionScopes": [{"permissions": "str", "resourceName": "str", "service": "str"}], + "sid": "str", + "sshAuthorizedKeys": [{"description": "str", "key": "str"}], + "userId": 0, + }, "systemData": { "createdAt": "2020-02-20 00:00:00", "createdBy": "str", @@ -73,9 +63,7 @@ def test_local_users_create_or_update(self, resource_group): "lastModifiedByType": "str", }, "type": "str", - "userId": 0, }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -88,12 +76,22 @@ def test_local_users_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", username="str", - api_version="2025-06-01", ) # please add some check logic here by yourself # ... 
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_local_users_list(self, resource_group): + response = self.client.local_users.list( + resource_group_name=resource_group.name, + account_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_local_users_list_keys(self, resource_group): @@ -101,7 +99,6 @@ def test_local_users_list_keys(self, resource_group): resource_group_name=resource_group.name, account_name="str", username="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -114,7 +111,6 @@ def test_local_users_regenerate_password(self, resource_group): resource_group_name=resource_group.name, account_name="str", username="str", - api_version="2025-06-01", ) # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_local_users_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_local_users_operations_async.py index a581ed476d3b..7b5b3b659d95 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_local_users_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_local_users_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -19,18 +19,6 @@ class TestStorageManagementLocalUsersOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): self.client = self.create_mgmt_client(StorageManagementClient, is_async=True) - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy_async - async def test_local_users_list(self, resource_group): - response = self.client.local_users.list( - resource_group_name=resource_group.name, - account_name="str", - api_version="2025-06-01", - ) - result = [r async for r in response] - # please add some check logic here by yourself - # ... - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async async def test_local_users_get(self, resource_group): @@ -38,7 +26,6 @@ async def test_local_users_get(self, resource_group): resource_group_name=resource_group.name, account_name="str", username="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -52,19 +39,22 @@ async def test_local_users_create_or_update(self, resource_group): account_name="str", username="str", properties={ - "allowAclAuthorization": bool, - "extendedGroups": [0], - "groupId": 0, - "hasSharedKey": bool, - "hasSshKey": bool, - "hasSshPassword": bool, - "homeDirectory": "str", "id": "str", - "isNFSv3Enabled": bool, "name": "str", - "permissionScopes": [{"permissions": "str", "resourceName": "str", "service": "str"}], - "sid": "str", - "sshAuthorizedKeys": [{"description": "str", "key": "str"}], + "properties": { + "allowAclAuthorization": bool, + "extendedGroups": [0], + "groupId": 0, + "hasSharedKey": bool, + "hasSshKey": bool, + "hasSshPassword": bool, + "homeDirectory": "str", + "isNFSv3Enabled": bool, + "permissionScopes": [{"permissions": "str", "resourceName": "str", "service": "str"}], + "sid": "str", + "sshAuthorizedKeys": [{"description": "str", "key": "str"}], + "userId": 0, + }, "systemData": { 
"createdAt": "2020-02-20 00:00:00", "createdBy": "str", @@ -74,9 +64,7 @@ async def test_local_users_create_or_update(self, resource_group): "lastModifiedByType": "str", }, "type": "str", - "userId": 0, }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -89,12 +77,22 @@ async def test_local_users_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", username="str", - api_version="2025-06-01", ) # please add some check logic here by yourself # ... + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_local_users_list(self, resource_group): + response = self.client.local_users.list( + resource_group_name=resource_group.name, + account_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async async def test_local_users_list_keys(self, resource_group): @@ -102,7 +100,6 @@ async def test_local_users_list_keys(self, resource_group): resource_group_name=resource_group.name, account_name="str", username="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -115,7 +112,6 @@ async def test_local_users_regenerate_password(self, resource_group): resource_group_name=resource_group.name, account_name="str", username="str", - api_version="2025-06-01", ) # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_management_policies_operations.py index 6f4881817a29..51363c7372bb 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_management_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_management_policies_operations.py @@ -2,7 +2,7 @@ # 
-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -25,7 +25,6 @@ def test_management_policies_get(self, resource_group): resource_group_name=resource_group.name, account_name="str", management_policy_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -40,106 +39,115 @@ def test_management_policies_create_or_update(self, resource_group): management_policy_name="str", properties={ "id": "str", - "lastModifiedTime": "2020-02-20 00:00:00", "name": "str", - "policy": { - "rules": [ - { - "definition": { - "actions": { - "baseBlob": { - "delete": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastAccessTimeGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - "daysAfterModificationGreaterThan": 0.0, + "properties": { + "policy": { + "rules": [ + { + "definition": { + "actions": { + "baseBlob": { + "delete": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastAccessTimeGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + "daysAfterModificationGreaterThan": 0.0, + }, + "enableAutoTierToHotFromCool": bool, + "tierToArchive": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastAccessTimeGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + "daysAfterModificationGreaterThan": 0.0, + }, + "tierToCold": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastAccessTimeGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + "daysAfterModificationGreaterThan": 0.0, + }, + "tierToCool": { + 
"daysAfterCreationGreaterThan": 0.0, + "daysAfterLastAccessTimeGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + "daysAfterModificationGreaterThan": 0.0, + }, + "tierToHot": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastAccessTimeGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + "daysAfterModificationGreaterThan": 0.0, + }, }, - "enableAutoTierToHotFromCool": bool, - "tierToArchive": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastAccessTimeGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - "daysAfterModificationGreaterThan": 0.0, + "snapshot": { + "delete": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToArchive": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToCold": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToCool": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToHot": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, }, - "tierToCold": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastAccessTimeGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - "daysAfterModificationGreaterThan": 0.0, - }, - "tierToCool": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastAccessTimeGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - "daysAfterModificationGreaterThan": 0.0, - }, - "tierToHot": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastAccessTimeGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - "daysAfterModificationGreaterThan": 0.0, + "version": { + "delete": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToArchive": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToCold": { + 
"daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToCool": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToHot": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, }, }, - "snapshot": { - "delete": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToArchive": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToCold": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToCool": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToHot": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, + "filters": { + "blobTypes": ["str"], + "blobIndexMatch": [{"name": "str", "op": "str", "value": "str"}], + "prefixMatch": ["str"], }, - "version": { - "delete": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToArchive": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToCold": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToCool": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToHot": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - }, - }, - "filters": { - "blobTypes": ["str"], - "blobIndexMatch": [{"name": "str", "op": "str", "value": "str"}], - "prefixMatch": ["str"], }, - }, - "name": "str", - "type": "str", - "enabled": bool, - } - ] + "name": "str", + "type": "str", + "enabled": bool, + } + ] + }, + "lastModifiedTime": "2020-02-20 00:00:00", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": 
"2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", }, "type": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -152,7 +160,6 @@ def test_management_policies_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", management_policy_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_management_policies_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_management_policies_operations_async.py index 62d6f53afec0..9bd0cfaa3b2e 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_management_policies_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_management_policies_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -26,7 +26,6 @@ async def test_management_policies_get(self, resource_group): resource_group_name=resource_group.name, account_name="str", management_policy_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -41,106 +40,115 @@ async def test_management_policies_create_or_update(self, resource_group): management_policy_name="str", properties={ "id": "str", - "lastModifiedTime": "2020-02-20 00:00:00", "name": "str", - "policy": { - "rules": [ - { - "definition": { - "actions": { - "baseBlob": { - "delete": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastAccessTimeGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - "daysAfterModificationGreaterThan": 0.0, + "properties": { + "policy": { + "rules": [ + { + "definition": { + "actions": { + "baseBlob": { + "delete": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastAccessTimeGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + "daysAfterModificationGreaterThan": 0.0, + }, + "enableAutoTierToHotFromCool": bool, + "tierToArchive": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastAccessTimeGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + "daysAfterModificationGreaterThan": 0.0, + }, + "tierToCold": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastAccessTimeGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + "daysAfterModificationGreaterThan": 0.0, + }, + "tierToCool": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastAccessTimeGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + "daysAfterModificationGreaterThan": 0.0, + }, + "tierToHot": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastAccessTimeGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + "daysAfterModificationGreaterThan": 0.0, + }, }, - "enableAutoTierToHotFromCool": bool, - "tierToArchive": { - 
"daysAfterCreationGreaterThan": 0.0, - "daysAfterLastAccessTimeGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - "daysAfterModificationGreaterThan": 0.0, + "snapshot": { + "delete": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToArchive": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToCold": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToCool": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToHot": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, }, - "tierToCold": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastAccessTimeGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - "daysAfterModificationGreaterThan": 0.0, - }, - "tierToCool": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastAccessTimeGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - "daysAfterModificationGreaterThan": 0.0, - }, - "tierToHot": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastAccessTimeGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - "daysAfterModificationGreaterThan": 0.0, + "version": { + "delete": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToArchive": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToCold": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToCool": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, + "tierToHot": { + "daysAfterCreationGreaterThan": 0.0, + "daysAfterLastTierChangeGreaterThan": 0.0, + }, }, }, - "snapshot": { - "delete": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToArchive": { - 
"daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToCold": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToCool": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToHot": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, + "filters": { + "blobTypes": ["str"], + "blobIndexMatch": [{"name": "str", "op": "str", "value": "str"}], + "prefixMatch": ["str"], }, - "version": { - "delete": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToArchive": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToCold": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToCool": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - "tierToHot": { - "daysAfterCreationGreaterThan": 0.0, - "daysAfterLastTierChangeGreaterThan": 0.0, - }, - }, - }, - "filters": { - "blobTypes": ["str"], - "blobIndexMatch": [{"name": "str", "op": "str", "value": "str"}], - "prefixMatch": ["str"], }, - }, - "name": "str", - "type": "str", - "enabled": bool, - } - ] + "name": "str", + "type": "str", + "enabled": bool, + } + ] + }, + "lastModifiedTime": "2020-02-20 00:00:00", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", }, "type": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -153,7 +161,6 @@ async def test_management_policies_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", management_policy_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself diff --git 
a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_network_security_perimeter_configurations_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_network_security_perimeter_configurations_operations.py index 394b5b0dd2c5..7e06cff3a326 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_network_security_perimeter_configurations_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_network_security_perimeter_configurations_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -20,26 +20,24 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_network_security_perimeter_configurations_list(self, resource_group): - response = self.client.network_security_perimeter_configurations.list( + def test_network_security_perimeter_configurations_get(self, resource_group): + response = self.client.network_security_perimeter_configurations.get( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", + network_security_perimeter_configuration_name="str", ) - result = [r for r in response] + # please add some check logic here by yourself # ... 
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_network_security_perimeter_configurations_get(self, resource_group): - response = self.client.network_security_perimeter_configurations.get( + def test_network_security_perimeter_configurations_list(self, resource_group): + response = self.client.network_security_perimeter_configurations.list( resource_group_name=resource_group.name, account_name="str", - network_security_perimeter_configuration_name="str", - api_version="2025-06-01", ) - + result = [r for r in response] # please add some check logic here by yourself # ... @@ -50,7 +48,6 @@ def test_network_security_perimeter_configurations_begin_reconcile(self, resourc resource_group_name=resource_group.name, account_name="str", network_security_perimeter_configuration_name="str", - api_version="2025-06-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_network_security_perimeter_configurations_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_network_security_perimeter_configurations_operations_async.py index 69627089a1b5..96555b2d57c7 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_network_security_perimeter_configurations_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_network_security_perimeter_configurations_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -21,26 +21,24 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_network_security_perimeter_configurations_list(self, resource_group): - response = self.client.network_security_perimeter_configurations.list( + async def test_network_security_perimeter_configurations_get(self, resource_group): + response = await self.client.network_security_perimeter_configurations.get( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", + network_security_perimeter_configuration_name="str", ) - result = [r async for r in response] + # please add some check logic here by yourself # ... @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_network_security_perimeter_configurations_get(self, resource_group): - response = await self.client.network_security_perimeter_configurations.get( + async def test_network_security_perimeter_configurations_list(self, resource_group): + response = self.client.network_security_perimeter_configurations.list( resource_group_name=resource_group.name, account_name="str", - network_security_perimeter_configuration_name="str", - api_version="2025-06-01", ) - + result = [r async for r in response] # please add some check logic here by yourself # ... 
@@ -52,7 +50,6 @@ async def test_network_security_perimeter_configurations_begin_reconcile(self, r resource_group_name=resource_group.name, account_name="str", network_security_perimeter_configuration_name="str", - api_version="2025-06-01", ) ).result() # call '.result()' to poll until service return final result diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_object_replication_policies_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_object_replication_policies_operations.py index 1959278bd452..516736510a0c 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_object_replication_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_object_replication_policies_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -18,18 +18,6 @@ class TestStorageManagementObjectReplicationPoliciesOperations(AzureMgmtRecorded def setup_method(self, method): self.client = self.create_mgmt_client(StorageManagementClient) - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy - def test_object_replication_policies_list(self, resource_group): - response = self.client.object_replication_policies.list( - resource_group_name=resource_group.name, - account_name="str", - api_version="2025-06-01", - ) - result = [r for r in response] - # please add some check logic here by yourself - # ... 
- @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_object_replication_policies_get(self, resource_group): @@ -37,7 +25,6 @@ def test_object_replication_policies_get(self, resource_group): resource_group_name=resource_group.name, account_name="str", object_replication_policy_id="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -51,25 +38,35 @@ def test_object_replication_policies_create_or_update(self, resource_group): account_name="str", object_replication_policy_id="str", properties={ - "destinationAccount": "str", - "enabledTime": "2020-02-20 00:00:00", "id": "str", - "metrics": {"enabled": bool}, "name": "str", - "policyId": "str", - "priorityReplication": {"enabled": bool}, - "rules": [ - { - "destinationContainer": "str", - "sourceContainer": "str", - "filters": {"minCreationTime": "str", "prefixMatch": ["str"]}, - "ruleId": "str", - } - ], - "sourceAccount": "str", + "properties": { + "destinationAccount": "str", + "sourceAccount": "str", + "enabledTime": "2020-02-20 00:00:00", + "metrics": {"enabled": bool}, + "policyId": "str", + "priorityReplication": {"enabled": bool}, + "rules": [ + { + "destinationContainer": "str", + "sourceContainer": "str", + "filters": {"minCreationTime": "str", "prefixMatch": ["str"]}, + "ruleId": "str", + } + ], + "tagsReplication": {"enabled": bool}, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -82,8 +79,18 @@ def test_object_replication_policies_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", object_replication_policy_id="str", - api_version="2025-06-01", ) # please add some check logic here by yourself # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_object_replication_policies_list(self, resource_group): + response = self.client.object_replication_policies.list( + resource_group_name=resource_group.name, + account_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_object_replication_policies_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_object_replication_policies_operations_async.py index 81ecd79c93d4..d3642e0be631 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_object_replication_policies_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_object_replication_policies_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -19,18 +19,6 @@ class TestStorageManagementObjectReplicationPoliciesOperationsAsync(AzureMgmtRec def setup_method(self, method): self.client = self.create_mgmt_client(StorageManagementClient, is_async=True) - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy_async - async def test_object_replication_policies_list(self, resource_group): - response = self.client.object_replication_policies.list( - resource_group_name=resource_group.name, - account_name="str", - api_version="2025-06-01", - ) - result = [r async for r in response] - # please add some check logic here by yourself - # ... - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async async def test_object_replication_policies_get(self, resource_group): @@ -38,7 +26,6 @@ async def test_object_replication_policies_get(self, resource_group): resource_group_name=resource_group.name, account_name="str", object_replication_policy_id="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -52,25 +39,35 @@ async def test_object_replication_policies_create_or_update(self, resource_group account_name="str", object_replication_policy_id="str", properties={ - "destinationAccount": "str", - "enabledTime": "2020-02-20 00:00:00", "id": "str", - "metrics": {"enabled": bool}, "name": "str", - "policyId": "str", - "priorityReplication": {"enabled": bool}, - "rules": [ - { - "destinationContainer": "str", - "sourceContainer": "str", - "filters": {"minCreationTime": "str", "prefixMatch": ["str"]}, - "ruleId": "str", - } - ], - "sourceAccount": "str", + "properties": { + "destinationAccount": "str", + "sourceAccount": "str", + "enabledTime": "2020-02-20 00:00:00", + "metrics": {"enabled": bool}, + "policyId": "str", + "priorityReplication": {"enabled": bool}, + "rules": [ + { + "destinationContainer": "str", + "sourceContainer": "str", + "filters": 
{"minCreationTime": "str", "prefixMatch": ["str"]}, + "ruleId": "str", + } + ], + "tagsReplication": {"enabled": bool}, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -83,8 +80,18 @@ async def test_object_replication_policies_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", object_replication_policy_id="str", - api_version="2025-06-01", ) # please add some check logic here by yourself # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_object_replication_policies_list(self, resource_group): + response = self.client.object_replication_policies.list( + resource_group_name=resource_group.name, + account_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_operations.py index cc13c0f83ed7..36982f2f4481 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -21,9 +21,7 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_operations_list(self, resource_group): - response = self.client.operations.list( - api_version="2025-06-01", - ) + response = self.client.operations.list() result = [r for r in response] # please add some check logic here by yourself # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_operations_async.py index 7af82dda8655..fa5773dec83f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -22,9 +22,7 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async async def test_operations_list(self, resource_group): - response = self.client.operations.list( - api_version="2025-06-01", - ) + response = self.client.operations.list() result = [r async for r in response] # please add some check logic here by yourself # ... 
diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_endpoint_connections_operations.py index 076c0bf1412b..63e85f1fde7f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_endpoint_connections_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_endpoint_connections_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -18,18 +18,6 @@ class TestStorageManagementPrivateEndpointConnectionsOperations(AzureMgmtRecorde def setup_method(self, method): self.client = self.create_mgmt_client(StorageManagementClient) - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy - def test_private_endpoint_connections_list(self, resource_group): - response = self.client.private_endpoint_connections.list( - resource_group_name=resource_group.name, - account_name="str", - api_version="2025-06-01", - ) - result = [r for r in response] - # please add some check logic here by yourself - # ... 
- @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_private_endpoint_connections_get(self, resource_group): @@ -37,7 +25,6 @@ def test_private_endpoint_connections_get(self, resource_group): resource_group_name=resource_group.name, account_name="str", private_endpoint_connection_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -53,12 +40,25 @@ def test_private_endpoint_connections_put(self, resource_group): properties={ "id": "str", "name": "str", - "privateEndpoint": {"id": "str"}, - "privateLinkServiceConnectionState": {"actionRequired": "str", "description": "str", "status": "str"}, - "provisioningState": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionRequired": "str", + "description": "str", + "status": "str", + }, + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -71,8 +71,18 @@ def test_private_endpoint_connections_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", private_endpoint_connection_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_private_endpoint_connections_list(self, resource_group): + response = self.client.private_endpoint_connections.list( + resource_group_name=resource_group.name, + account_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_endpoint_connections_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_endpoint_connections_operations_async.py index 9d7b9d31f46e..b8b21f7f05d4 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_endpoint_connections_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_endpoint_connections_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -19,18 +19,6 @@ class TestStorageManagementPrivateEndpointConnectionsOperationsAsync(AzureMgmtRe def setup_method(self, method): self.client = self.create_mgmt_client(StorageManagementClient, is_async=True) - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy_async - async def test_private_endpoint_connections_list(self, resource_group): - response = self.client.private_endpoint_connections.list( - resource_group_name=resource_group.name, - account_name="str", - api_version="2025-06-01", - ) - result = [r async for r in response] - # please add some check logic here by yourself - # ... 
- @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async async def test_private_endpoint_connections_get(self, resource_group): @@ -38,7 +26,6 @@ async def test_private_endpoint_connections_get(self, resource_group): resource_group_name=resource_group.name, account_name="str", private_endpoint_connection_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -54,12 +41,25 @@ async def test_private_endpoint_connections_put(self, resource_group): properties={ "id": "str", "name": "str", - "privateEndpoint": {"id": "str"}, - "privateLinkServiceConnectionState": {"actionRequired": "str", "description": "str", "status": "str"}, - "provisioningState": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionRequired": "str", + "description": "str", + "status": "str", + }, + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -72,8 +72,18 @@ async def test_private_endpoint_connections_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", private_endpoint_connection_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_private_endpoint_connections_list(self, resource_group): + response = self.client.private_endpoint_connections.list( + resource_group_name=resource_group.name, + account_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_link_resources_operations.py index 2a2e749a1fa4..b07cdd0a5276 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_link_resources_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_link_resources_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -24,7 +24,6 @@ def test_private_link_resources_list_by_storage_account(self, resource_group): response = self.client.private_link_resources.list_by_storage_account( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_link_resources_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_link_resources_operations_async.py index 81d6fa2d9ef4..5bfbf0300dcf 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_link_resources_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_private_link_resources_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft 
Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -25,7 +25,6 @@ async def test_private_link_resources_list_by_storage_account(self, resource_gro response = await self.client.private_link_resources.list_by_storage_account( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_operations.py index acb8012baa2b..0aea8df1362f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -20,27 +20,22 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_queue_create(self, resource_group): - response = self.client.queue.create( + def test_queue_list(self, resource_group): + response = self.client.queue.list( resource_group_name=resource_group.name, account_name="str", - queue_name="str", - queue={"approximateMessageCount": 0, "id": "str", "metadata": {"str": "str"}, "name": "str", "type": "str"}, - api_version="2025-06-01", ) - + result = [r for r in response] # please add some check logic here by yourself # ... @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_queue_update(self, resource_group): - response = self.client.queue.update( + def test_queue_get(self, resource_group): + response = self.client.queue.get( resource_group_name=resource_group.name, account_name="str", queue_name="str", - queue={"approximateMessageCount": 0, "id": "str", "metadata": {"str": "str"}, "name": "str", "type": "str"}, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -48,12 +43,25 @@ def test_queue_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_queue_get(self, resource_group): - response = self.client.queue.get( + def test_queue_create(self, resource_group): + response = self.client.queue.create( resource_group_name=resource_group.name, account_name="str", queue_name="str", - api_version="2025-06-01", + queue={ + "id": "str", + "name": "str", + "properties": {"approximateMessageCount": 0, "metadata": {"str": "str"}}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, ) # please add some check 
logic here by yourself @@ -61,12 +69,25 @@ def test_queue_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_queue_delete(self, resource_group): - response = self.client.queue.delete( + def test_queue_update(self, resource_group): + response = self.client.queue.update( resource_group_name=resource_group.name, account_name="str", queue_name="str", - api_version="2025-06-01", + queue={ + "id": "str", + "name": "str", + "properties": {"approximateMessageCount": 0, "metadata": {"str": "str"}}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, ) # please add some check logic here by yourself @@ -74,12 +95,12 @@ def test_queue_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_queue_list(self, resource_group): - response = self.client.queue.list( + def test_queue_delete(self, resource_group): + response = self.client.queue.delete( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", + queue_name="str", ) - result = [r for r in response] + # please add some check logic here by yourself # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_operations_async.py index 307be0662ec8..5a39bb10182c 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -21,27 +21,22 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_queue_create(self, resource_group): - response = await self.client.queue.create( + async def test_queue_list(self, resource_group): + response = self.client.queue.list( resource_group_name=resource_group.name, account_name="str", - queue_name="str", - queue={"approximateMessageCount": 0, "id": "str", "metadata": {"str": "str"}, "name": "str", "type": "str"}, - api_version="2025-06-01", ) - + result = [r async for r in response] # please add some check logic here by yourself # ... @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_queue_update(self, resource_group): - response = await self.client.queue.update( + async def test_queue_get(self, resource_group): + response = await self.client.queue.get( resource_group_name=resource_group.name, account_name="str", queue_name="str", - queue={"approximateMessageCount": 0, "id": "str", "metadata": {"str": "str"}, "name": "str", "type": "str"}, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -49,12 +44,25 @@ async def test_queue_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_queue_get(self, resource_group): - response = await self.client.queue.get( + async def test_queue_create(self, resource_group): + response = await self.client.queue.create( resource_group_name=resource_group.name, account_name="str", queue_name="str", - api_version="2025-06-01", + queue={ + "id": 
"str", + "name": "str", + "properties": {"approximateMessageCount": 0, "metadata": {"str": "str"}}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, ) # please add some check logic here by yourself @@ -62,12 +70,25 @@ async def test_queue_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_queue_delete(self, resource_group): - response = await self.client.queue.delete( + async def test_queue_update(self, resource_group): + response = await self.client.queue.update( resource_group_name=resource_group.name, account_name="str", queue_name="str", - api_version="2025-06-01", + queue={ + "id": "str", + "name": "str", + "properties": {"approximateMessageCount": 0, "metadata": {"str": "str"}}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, ) # please add some check logic here by yourself @@ -75,12 +96,12 @@ async def test_queue_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_queue_list(self, resource_group): - response = self.client.queue.list( + async def test_queue_delete(self, resource_group): + response = await self.client.queue.delete( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", + queue_name="str", ) - result = [r async for r in response] + # please add some check logic here by yourself # ... 
diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_services_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_services_operations.py index b17a58451b40..90bbc1d6f646 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_services_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -20,11 +20,10 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_queue_services_list(self, resource_group): - response = self.client.queue_services.list( + def test_queue_services_get_service_properties(self, resource_group): + response = self.client.queue_services.get_service_properties( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -37,23 +36,31 @@ def test_queue_services_set_service_properties(self, resource_group): resource_group_name=resource_group.name, account_name="str", parameters={ - "cors": { - "corsRules": [ - { - "allowedHeaders": ["str"], - "allowedMethods": ["str"], - "allowedOrigins": ["str"], - "exposedHeaders": ["str"], - "maxAgeInSeconds": 0, - } - ] - }, "id": "str", "name": "str", + "properties": { + "cors": { + "corsRules": [ + { + "allowedHeaders": ["str"], + "allowedMethods": ["str"], + 
"allowedOrigins": ["str"], + "exposedHeaders": ["str"], + "maxAgeInSeconds": 0, + } + ] + } + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", - queue_service_name="default", ) # please add some check logic here by yourself @@ -61,12 +68,10 @@ def test_queue_services_set_service_properties(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_queue_services_get_service_properties(self, resource_group): - response = self.client.queue_services.get_service_properties( + def test_queue_services_list(self, resource_group): + response = self.client.queue_services.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", - queue_service_name="default", ) # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_services_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_services_operations_async.py index 1144d4fdf74d..116cf4404d4b 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_services_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_queue_services_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -21,11 +21,10 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_queue_services_list(self, resource_group): - response = await self.client.queue_services.list( + async def test_queue_services_get_service_properties(self, resource_group): + response = await self.client.queue_services.get_service_properties( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -38,23 +37,31 @@ async def test_queue_services_set_service_properties(self, resource_group): resource_group_name=resource_group.name, account_name="str", parameters={ - "cors": { - "corsRules": [ - { - "allowedHeaders": ["str"], - "allowedMethods": ["str"], - "allowedOrigins": ["str"], - "exposedHeaders": ["str"], - "maxAgeInSeconds": 0, - } - ] - }, "id": "str", "name": "str", + "properties": { + "cors": { + "corsRules": [ + { + "allowedHeaders": ["str"], + "allowedMethods": ["str"], + "allowedOrigins": ["str"], + "exposedHeaders": ["str"], + "maxAgeInSeconds": 0, + } + ] + } + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", - queue_service_name="default", ) # please add some check logic here by yourself @@ -62,12 +69,10 @@ async def test_queue_services_set_service_properties(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_queue_services_get_service_properties(self, resource_group): - response = await self.client.queue_services.get_service_properties( + async def test_queue_services_list(self, resource_group): + response = await self.client.queue_services.list( 
resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", - queue_service_name="default", ) # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_skus_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_skus_operations.py index ed2f220275eb..7004da315c4a 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_skus_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -21,9 +21,7 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_skus_list(self, resource_group): - response = self.client.skus.list( - api_version="2025-06-01", - ) + response = self.client.skus.list() result = [r for r in response] # please add some check logic here by yourself # ... 
diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_skus_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_skus_operations_async.py index e63b0a1444d6..4d6c538311c5 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_skus_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_skus_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -22,9 +22,7 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async async def test_skus_list(self, resource_group): - response = self.client.skus.list( - api_version="2025-06-01", - ) + response = self.client.skus.list() result = [r async for r in response] # please add some check logic here by yourself # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_accounts_operations.py index 0e0b2553fd96..4f1ecafef6f7 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_accounts_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -23,7 +23,17 @@ def setup_method(self, method): def test_storage_accounts_check_name_availability(self, resource_group): response = self.client.storage_accounts.check_name_availability( account_name={"name": "str", "type": "Microsoft.Storage/storageAccounts"}, - api_version="2025-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_accounts_get_properties(self, resource_group): + response = self.client.storage_accounts.get_properties( + resource_group_name=resource_group.name, + account_name="str", ) # please add some check logic here by yourself @@ -39,123 +49,111 @@ def test_storage_accounts_begin_create(self, resource_group): "kind": "str", "location": "str", "sku": {"name": "str", "tier": "str"}, - "accessTier": "str", - "allowBlobPublicAccess": bool, - "allowCrossTenantReplication": bool, - "allowSharedKeyAccess": bool, - "allowedCopyScope": "str", - "azureFilesIdentityBasedAuthentication": { - "directoryServiceOptions": "str", - "activeDirectoryProperties": { - "accountType": "str", - "azureStorageSid": "str", - "domainGuid": "str", - "domainName": "str", - "domainSid": "str", - "forestName": "str", - "netBiosDomainName": "str", - "samAccountName": "str", - }, - "defaultSharePermission": "str", - "smbOAuthSettings": {"isSmbOAuthEnabled": bool}, - }, - "customDomain": {"name": "str", "useSubDomainName": bool}, - "defaultToOAuthAuthentication": bool, - "dnsEndpointType": "str", - "dualStackEndpointPreference": {"publishIpv6Endpoint": bool}, - 
"enableExtendedGroups": bool, - "encryption": { - "identity": {"federatedIdentityClientId": "str", "userAssignedIdentity": "str"}, - "keySource": "Microsoft.Storage", - "keyvaultproperties": { - "currentVersionedKeyExpirationTimestamp": "2020-02-20 00:00:00", - "currentVersionedKeyIdentifier": "str", - "keyname": "str", - "keyvaulturi": "str", - "keyversion": "str", - "lastKeyRotationTimestamp": "2020-02-20 00:00:00", - }, - "requireInfrastructureEncryption": bool, - "services": { - "blob": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, - "file": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, - "queue": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, - "table": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, - }, - }, "extendedLocation": {"name": "str", "type": "str"}, - "geoPriorityReplicationStatus": {"isBlobEnabled": bool}, "identity": { "type": "str", "principalId": "str", "tenantId": "str", "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, }, - "immutableStorageWithVersioning": { - "enabled": bool, - "immutabilityPolicy": { - "allowProtectedAppendWrites": bool, - "immutabilityPeriodSinceCreationInDays": 0, - "state": "str", - }, - }, - "isHnsEnabled": bool, - "isLocalUserEnabled": bool, - "isNfsV3Enabled": bool, - "isSftpEnabled": bool, - "keyPolicy": {"keyExpirationPeriodInDays": 0}, - "largeFileSharesState": "str", - "minimumTlsVersion": "str", - "networkAcls": { - "defaultAction": "Allow", - "bypass": "AzureServices", - "ipRules": [{"value": "str", "action": "Allow"}], - "ipv6Rules": [{"value": "str", "action": "Allow"}], - "resourceAccessRules": [{"resourceId": "str", "tenantId": "str"}], - "virtualNetworkRules": [{"id": "str", "action": "Allow", "state": "str"}], - }, "placement": {"zonePlacementPolicy": "str"}, - "publicNetworkAccess": "str", - "routingPreference": { - "publishInternetEndpoints": bool, 
- "publishMicrosoftEndpoints": bool, - "routingChoice": "str", + "properties": { + "accessTier": "str", + "allowBlobPublicAccess": bool, + "allowCrossTenantReplication": bool, + "allowSharedKeyAccess": bool, + "allowSharedKeyAccessForServices": { + "blob": {"enabled": bool}, + "file": {"enabled": bool}, + "queue": {"enabled": bool}, + "table": {"enabled": bool}, + }, + "allowedCopyScope": "str", + "azureFilesIdentityBasedAuthentication": { + "directoryServiceOptions": "str", + "activeDirectoryProperties": { + "accountType": "str", + "azureStorageSid": "str", + "domainGuid": "str", + "domainName": "str", + "domainSid": "str", + "forestName": "str", + "netBiosDomainName": "str", + "samAccountName": "str", + }, + "defaultSharePermission": "str", + "smbOAuthSettings": {"isSmbOAuthEnabled": bool}, + }, + "customDomain": {"name": "str", "useSubDomainName": bool}, + "dataCollaborationPolicyProperties": { + "allowCrossTenantDataSharing": bool, + "allowStorageConnectors": bool, + "allowStorageDataShares": bool, + }, + "defaultToOAuthAuthentication": bool, + "dnsEndpointType": "str", + "dualStackEndpointPreference": {"publishIpv6Endpoint": bool}, + "enableExtendedGroups": bool, + "encryption": { + "identity": {"federatedIdentityClientId": "str", "userAssignedIdentity": "str"}, + "keySource": "str", + "keyvaultproperties": { + "currentVersionedKeyExpirationTimestamp": "2020-02-20 00:00:00", + "currentVersionedKeyIdentifier": "str", + "keyname": "str", + "keyvaulturi": "str", + "keyversion": "str", + "lastKeyRotationTimestamp": "2020-02-20 00:00:00", + }, + "requireInfrastructureEncryption": bool, + "services": { + "blob": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, + "file": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, + "queue": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, + "table": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, + }, + }, + 
"geoPriorityReplicationStatus": {"isBlobEnabled": bool}, + "immutableStorageWithVersioning": { + "enabled": bool, + "immutabilityPolicy": { + "allowProtectedAppendWrites": bool, + "immutabilityPeriodSinceCreationInDays": 0, + "state": "str", + }, + }, + "isHnsEnabled": bool, + "isLocalUserEnabled": bool, + "isNfsV3Enabled": bool, + "isSftpEnabled": bool, + "keyPolicy": {"keyExpirationPeriodInDays": 0}, + "largeFileSharesState": "str", + "minimumTlsVersion": "str", + "networkAcls": { + "defaultAction": "str", + "bypass": "str", + "ipRules": [{"value": "str", "action": "Allow"}], + "ipv6Rules": [{"value": "str", "action": "Allow"}], + "resourceAccessRules": [{"resourceId": "str", "tenantId": "str"}], + "virtualNetworkRules": [{"id": "str", "action": "Allow", "state": "str"}], + }, + "publicNetworkAccess": "str", + "routingPreference": { + "publishInternetEndpoints": bool, + "publishMicrosoftEndpoints": bool, + "routingChoice": "str", + }, + "sasPolicy": {"expirationAction": "str", "sasExpirationPeriod": "str"}, + "supportsHttpsTrafficOnly": bool, }, - "sasPolicy": {"expirationAction": "Log", "sasExpirationPeriod": "str"}, - "supportsHttpsTrafficOnly": bool, "tags": {"str": "str"}, "zones": ["str"], }, - api_version="2025-06-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself # ... - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy - def test_storage_accounts_delete(self, resource_group): - response = self.client.storage_accounts.delete( - resource_group_name=resource_group.name, - account_name="str", - api_version="2025-06-01", - ) - - # please add some check logic here by yourself - # ... 
- - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy - def test_storage_accounts_get_properties(self, resource_group): - response = self.client.storage_accounts.get_properties( - resource_group_name=resource_group.name, - account_name="str", - api_version="2025-06-01", - ) - - # please add some check logic here by yourself - # ... - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_storage_accounts_update(self, resource_group): @@ -163,93 +161,105 @@ def test_storage_accounts_update(self, resource_group): resource_group_name=resource_group.name, account_name="str", parameters={ - "accessTier": "str", - "allowBlobPublicAccess": bool, - "allowCrossTenantReplication": bool, - "allowSharedKeyAccess": bool, - "allowedCopyScope": "str", - "azureFilesIdentityBasedAuthentication": { - "directoryServiceOptions": "str", - "activeDirectoryProperties": { - "accountType": "str", - "azureStorageSid": "str", - "domainGuid": "str", - "domainName": "str", - "domainSid": "str", - "forestName": "str", - "netBiosDomainName": "str", - "samAccountName": "str", - }, - "defaultSharePermission": "str", - "smbOAuthSettings": {"isSmbOAuthEnabled": bool}, - }, - "customDomain": {"name": "str", "useSubDomainName": bool}, - "defaultToOAuthAuthentication": bool, - "dnsEndpointType": "str", - "dualStackEndpointPreference": {"publishIpv6Endpoint": bool}, - "enableExtendedGroups": bool, - "encryption": { - "identity": {"federatedIdentityClientId": "str", "userAssignedIdentity": "str"}, - "keySource": "Microsoft.Storage", - "keyvaultproperties": { - "currentVersionedKeyExpirationTimestamp": "2020-02-20 00:00:00", - "currentVersionedKeyIdentifier": "str", - "keyname": "str", - "keyvaulturi": "str", - "keyversion": "str", - "lastKeyRotationTimestamp": "2020-02-20 00:00:00", - }, - "requireInfrastructureEncryption": bool, - "services": { - "blob": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, - "file": 
{"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, - "queue": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, - "table": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, - }, - }, - "geoPriorityReplicationStatus": {"isBlobEnabled": bool}, "identity": { "type": "str", "principalId": "str", "tenantId": "str", "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, }, - "immutableStorageWithVersioning": { - "enabled": bool, - "immutabilityPolicy": { - "allowProtectedAppendWrites": bool, - "immutabilityPeriodSinceCreationInDays": 0, - "state": "str", - }, - }, - "isLocalUserEnabled": bool, - "isSftpEnabled": bool, - "keyPolicy": {"keyExpirationPeriodInDays": 0}, "kind": "str", - "largeFileSharesState": "str", - "minimumTlsVersion": "str", - "networkAcls": { - "defaultAction": "Allow", - "bypass": "AzureServices", - "ipRules": [{"value": "str", "action": "Allow"}], - "ipv6Rules": [{"value": "str", "action": "Allow"}], - "resourceAccessRules": [{"resourceId": "str", "tenantId": "str"}], - "virtualNetworkRules": [{"id": "str", "action": "Allow", "state": "str"}], - }, "placement": {"zonePlacementPolicy": "str"}, - "publicNetworkAccess": "str", - "routingPreference": { - "publishInternetEndpoints": bool, - "publishMicrosoftEndpoints": bool, - "routingChoice": "str", + "properties": { + "accessTier": "str", + "allowBlobPublicAccess": bool, + "allowCrossTenantReplication": bool, + "allowSharedKeyAccess": bool, + "allowSharedKeyAccessForServices": { + "blob": {"enabled": bool}, + "file": {"enabled": bool}, + "queue": {"enabled": bool}, + "table": {"enabled": bool}, + }, + "allowedCopyScope": "str", + "azureFilesIdentityBasedAuthentication": { + "directoryServiceOptions": "str", + "activeDirectoryProperties": { + "accountType": "str", + "azureStorageSid": "str", + "domainGuid": "str", + "domainName": "str", + "domainSid": "str", + "forestName": "str", + 
"netBiosDomainName": "str", + "samAccountName": "str", + }, + "defaultSharePermission": "str", + "smbOAuthSettings": {"isSmbOAuthEnabled": bool}, + }, + "customDomain": {"name": "str", "useSubDomainName": bool}, + "dataCollaborationPolicyProperties": { + "allowCrossTenantDataSharing": bool, + "allowStorageConnectors": bool, + "allowStorageDataShares": bool, + }, + "defaultToOAuthAuthentication": bool, + "dnsEndpointType": "str", + "dualStackEndpointPreference": {"publishIpv6Endpoint": bool}, + "enableExtendedGroups": bool, + "encryption": { + "identity": {"federatedIdentityClientId": "str", "userAssignedIdentity": "str"}, + "keySource": "str", + "keyvaultproperties": { + "currentVersionedKeyExpirationTimestamp": "2020-02-20 00:00:00", + "currentVersionedKeyIdentifier": "str", + "keyname": "str", + "keyvaulturi": "str", + "keyversion": "str", + "lastKeyRotationTimestamp": "2020-02-20 00:00:00", + }, + "requireInfrastructureEncryption": bool, + "services": { + "blob": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, + "file": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, + "queue": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, + "table": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, + }, + }, + "geoPriorityReplicationStatus": {"isBlobEnabled": bool}, + "immutableStorageWithVersioning": { + "enabled": bool, + "immutabilityPolicy": { + "allowProtectedAppendWrites": bool, + "immutabilityPeriodSinceCreationInDays": 0, + "state": "str", + }, + }, + "isLocalUserEnabled": bool, + "isSftpEnabled": bool, + "keyPolicy": {"keyExpirationPeriodInDays": 0}, + "largeFileSharesState": "str", + "minimumTlsVersion": "str", + "networkAcls": { + "defaultAction": "str", + "bypass": "str", + "ipRules": [{"value": "str", "action": "Allow"}], + "ipv6Rules": [{"value": "str", "action": "Allow"}], + "resourceAccessRules": [{"resourceId": "str", "tenantId": "str"}], 
+ "virtualNetworkRules": [{"id": "str", "action": "Allow", "state": "str"}], + }, + "publicNetworkAccess": "str", + "routingPreference": { + "publishInternetEndpoints": bool, + "publishMicrosoftEndpoints": bool, + "routingChoice": "str", + }, + "sasPolicy": {"expirationAction": "str", "sasExpirationPeriod": "str"}, + "supportsHttpsTrafficOnly": bool, }, - "sasPolicy": {"expirationAction": "Log", "sasExpirationPeriod": "str"}, "sku": {"name": "str", "tier": "str"}, - "supportsHttpsTrafficOnly": bool, "tags": {"str": "str"}, "zones": ["str"], }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -257,11 +267,12 @@ def test_storage_accounts_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_storage_accounts_list(self, resource_group): - response = self.client.storage_accounts.list( - api_version="2025-06-01", + def test_storage_accounts_delete(self, resource_group): + response = self.client.storage_accounts.delete( + resource_group_name=resource_group.name, + account_name="str", ) - result = [r for r in response] + # please add some check logic here by yourself # ... @@ -270,19 +281,25 @@ def test_storage_accounts_list(self, resource_group): def test_storage_accounts_list_by_resource_group(self, resource_group): response = self.client.storage_accounts.list_by_resource_group( resource_group_name=resource_group.name, - api_version="2025-06-01", ) result = [r for r in response] # please add some check logic here by yourself # ... + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_accounts_list(self, resource_group): + response = self.client.storage_accounts.list() + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_storage_accounts_list_keys(self, resource_group): response = self.client.storage_accounts.list_keys( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -295,7 +312,6 @@ def test_storage_accounts_regenerate_key(self, resource_group): resource_group_name=resource_group.name, account_name="str", regenerate_key={"keyName": "str"}, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -317,7 +333,6 @@ def test_storage_accounts_list_account_sas(self, resource_group): "signedProtocol": "str", "signedStart": "2020-02-20 00:00:00", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -349,7 +364,6 @@ def test_storage_accounts_list_service_sas(self, resource_group): "startPk": "str", "startRk": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -361,7 +375,6 @@ def test_storage_accounts_begin_failover(self, resource_group): response = self.client.storage_accounts.begin_failover( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -374,7 +387,6 @@ def test_storage_accounts_begin_hierarchical_namespace_migration(self, resource_ resource_group_name=resource_group.name, account_name="str", request_type="str", - api_version="2025-06-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -386,7 +398,6 @@ def test_storage_accounts_begin_abort_hierarchical_namespace_migration(self, res response = self.client.storage_accounts.begin_abort_hierarchical_namespace_migration( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ).result() # call '.result()' 
to poll until service return final result # please add some check logic here by yourself @@ -399,33 +410,29 @@ def test_storage_accounts_begin_customer_initiated_migration(self, resource_grou resource_group_name=resource_group.name, account_name="str", parameters={ - "targetSkuName": "str", + "properties": { + "targetSkuName": "str", + "migrationFailedDetailedReason": "str", + "migrationFailedReason": "str", + "migrationStatus": "str", + }, "id": "str", - "migrationFailedDetailedReason": "str", - "migrationFailedReason": "str", - "migrationStatus": "str", "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself # ... - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy - def test_storage_accounts_get_customer_initiated_migration(self, resource_group): - response = self.client.storage_accounts.get_customer_initiated_migration( - resource_group_name=resource_group.name, - account_name="str", - migration_name="str", - api_version="2025-06-01", - ) - - # please add some check logic here by yourself - # ... 
- @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_storage_accounts_begin_restore_blob_ranges(self, resource_group): @@ -436,7 +443,6 @@ def test_storage_accounts_begin_restore_blob_ranges(self, resource_group): "blobRanges": [{"endRange": "str", "startRange": "str"}], "timeToRestore": "2020-02-20 00:00:00", }, - api_version="2025-06-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -448,7 +454,18 @@ def test_storage_accounts_revoke_user_delegation_keys(self, resource_group): response = self.client.storage_accounts.revoke_user_delegation_keys( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_accounts_get_customer_initiated_migration(self, resource_group): + response = self.client.storage_accounts.get_customer_initiated_migration( + resource_group_name=resource_group.name, + account_name="str", + migration_name="str", ) # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_accounts_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_accounts_operations_async.py index 8f9d1f38ce27..19ea81f37eb5 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_accounts_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_accounts_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. 
-# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -24,7 +24,17 @@ def setup_method(self, method): async def test_storage_accounts_check_name_availability(self, resource_group): response = await self.client.storage_accounts.check_name_availability( account_name={"name": "str", "type": "Microsoft.Storage/storageAccounts"}, - api_version="2025-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_accounts_get_properties(self, resource_group): + response = await self.client.storage_accounts.get_properties( + resource_group_name=resource_group.name, + account_name="str", ) # please add some check logic here by yourself @@ -41,10 +51,138 @@ async def test_storage_accounts_begin_create(self, resource_group): "kind": "str", "location": "str", "sku": {"name": "str", "tier": "str"}, + "extendedLocation": {"name": "str", "type": "str"}, + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "placement": {"zonePlacementPolicy": "str"}, + "properties": { + "accessTier": "str", + "allowBlobPublicAccess": bool, + "allowCrossTenantReplication": bool, + "allowSharedKeyAccess": bool, + "allowSharedKeyAccessForServices": { + "blob": {"enabled": bool}, + "file": {"enabled": bool}, + "queue": {"enabled": bool}, + "table": {"enabled": bool}, + }, + "allowedCopyScope": "str", + "azureFilesIdentityBasedAuthentication": { + "directoryServiceOptions": "str", + "activeDirectoryProperties": { + "accountType": "str", + "azureStorageSid": "str", + "domainGuid": "str", + "domainName": "str", + "domainSid": "str", + 
"forestName": "str", + "netBiosDomainName": "str", + "samAccountName": "str", + }, + "defaultSharePermission": "str", + "smbOAuthSettings": {"isSmbOAuthEnabled": bool}, + }, + "customDomain": {"name": "str", "useSubDomainName": bool}, + "dataCollaborationPolicyProperties": { + "allowCrossTenantDataSharing": bool, + "allowStorageConnectors": bool, + "allowStorageDataShares": bool, + }, + "defaultToOAuthAuthentication": bool, + "dnsEndpointType": "str", + "dualStackEndpointPreference": {"publishIpv6Endpoint": bool}, + "enableExtendedGroups": bool, + "encryption": { + "identity": {"federatedIdentityClientId": "str", "userAssignedIdentity": "str"}, + "keySource": "str", + "keyvaultproperties": { + "currentVersionedKeyExpirationTimestamp": "2020-02-20 00:00:00", + "currentVersionedKeyIdentifier": "str", + "keyname": "str", + "keyvaulturi": "str", + "keyversion": "str", + "lastKeyRotationTimestamp": "2020-02-20 00:00:00", + }, + "requireInfrastructureEncryption": bool, + "services": { + "blob": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, + "file": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, + "queue": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, + "table": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, + }, + }, + "geoPriorityReplicationStatus": {"isBlobEnabled": bool}, + "immutableStorageWithVersioning": { + "enabled": bool, + "immutabilityPolicy": { + "allowProtectedAppendWrites": bool, + "immutabilityPeriodSinceCreationInDays": 0, + "state": "str", + }, + }, + "isHnsEnabled": bool, + "isLocalUserEnabled": bool, + "isNfsV3Enabled": bool, + "isSftpEnabled": bool, + "keyPolicy": {"keyExpirationPeriodInDays": 0}, + "largeFileSharesState": "str", + "minimumTlsVersion": "str", + "networkAcls": { + "defaultAction": "str", + "bypass": "str", + "ipRules": [{"value": "str", "action": "Allow"}], + "ipv6Rules": [{"value": "str", "action": 
"Allow"}], + "resourceAccessRules": [{"resourceId": "str", "tenantId": "str"}], + "virtualNetworkRules": [{"id": "str", "action": "Allow", "state": "str"}], + }, + "publicNetworkAccess": "str", + "routingPreference": { + "publishInternetEndpoints": bool, + "publishMicrosoftEndpoints": bool, + "routingChoice": "str", + }, + "sasPolicy": {"expirationAction": "str", "sasExpirationPeriod": "str"}, + "supportsHttpsTrafficOnly": bool, + }, + "tags": {"str": "str"}, + "zones": ["str"], + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_accounts_update(self, resource_group): + response = await self.client.storage_accounts.update( + resource_group_name=resource_group.name, + account_name="str", + parameters={ + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "kind": "str", + "placement": {"zonePlacementPolicy": "str"}, + "properties": { "accessTier": "str", "allowBlobPublicAccess": bool, "allowCrossTenantReplication": bool, "allowSharedKeyAccess": bool, + "allowSharedKeyAccessForServices": { + "blob": {"enabled": bool}, + "file": {"enabled": bool}, + "queue": {"enabled": bool}, + "table": {"enabled": bool}, + }, "allowedCopyScope": "str", "azureFilesIdentityBasedAuthentication": { "directoryServiceOptions": "str", @@ -62,13 +200,18 @@ async def test_storage_accounts_begin_create(self, resource_group): "smbOAuthSettings": {"isSmbOAuthEnabled": bool}, }, "customDomain": {"name": "str", "useSubDomainName": bool}, + "dataCollaborationPolicyProperties": { + "allowCrossTenantDataSharing": bool, + "allowStorageConnectors": bool, + "allowStorageDataShares": bool, + }, "defaultToOAuthAuthentication": bool, "dnsEndpointType": "str", "dualStackEndpointPreference": 
{"publishIpv6Endpoint": bool}, "enableExtendedGroups": bool, "encryption": { "identity": {"federatedIdentityClientId": "str", "userAssignedIdentity": "str"}, - "keySource": "Microsoft.Storage", + "keySource": "str", "keyvaultproperties": { "currentVersionedKeyExpirationTimestamp": "2020-02-20 00:00:00", "currentVersionedKeyIdentifier": "str", @@ -85,14 +228,7 @@ async def test_storage_accounts_begin_create(self, resource_group): "table": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, }, }, - "extendedLocation": {"name": "str", "type": "str"}, "geoPriorityReplicationStatus": {"isBlobEnabled": bool}, - "identity": { - "type": "str", - "principalId": "str", - "tenantId": "str", - "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, - }, "immutableStorageWithVersioning": { "enabled": bool, "immutabilityPolicy": { @@ -101,36 +237,33 @@ async def test_storage_accounts_begin_create(self, resource_group): "state": "str", }, }, - "isHnsEnabled": bool, "isLocalUserEnabled": bool, - "isNfsV3Enabled": bool, "isSftpEnabled": bool, "keyPolicy": {"keyExpirationPeriodInDays": 0}, "largeFileSharesState": "str", "minimumTlsVersion": "str", "networkAcls": { - "defaultAction": "Allow", - "bypass": "AzureServices", + "defaultAction": "str", + "bypass": "str", "ipRules": [{"value": "str", "action": "Allow"}], "ipv6Rules": [{"value": "str", "action": "Allow"}], "resourceAccessRules": [{"resourceId": "str", "tenantId": "str"}], "virtualNetworkRules": [{"id": "str", "action": "Allow", "state": "str"}], }, - "placement": {"zonePlacementPolicy": "str"}, "publicNetworkAccess": "str", "routingPreference": { "publishInternetEndpoints": bool, "publishMicrosoftEndpoints": bool, "routingChoice": "str", }, - "sasPolicy": {"expirationAction": "Log", "sasExpirationPeriod": "str"}, + "sasPolicy": {"expirationAction": "str", "sasExpirationPeriod": "str"}, "supportsHttpsTrafficOnly": bool, - "tags": {"str": "str"}, - "zones": ["str"], }, - 
api_version="2025-06-01", - ) - ).result() # call '.result()' to poll until service return final result + "sku": {"name": "str", "tier": "str"}, + "tags": {"str": "str"}, + "zones": ["str"], + }, + ) # please add some check logic here by yourself # ... @@ -141,7 +274,6 @@ async def test_storage_accounts_delete(self, resource_group): response = await self.client.storage_accounts.delete( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -149,120 +281,9 @@ async def test_storage_accounts_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_storage_accounts_get_properties(self, resource_group): - response = await self.client.storage_accounts.get_properties( - resource_group_name=resource_group.name, - account_name="str", - api_version="2025-06-01", - ) - - # please add some check logic here by yourself - # ... - - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy_async - async def test_storage_accounts_update(self, resource_group): - response = await self.client.storage_accounts.update( + async def test_storage_accounts_list_by_resource_group(self, resource_group): + response = self.client.storage_accounts.list_by_resource_group( resource_group_name=resource_group.name, - account_name="str", - parameters={ - "accessTier": "str", - "allowBlobPublicAccess": bool, - "allowCrossTenantReplication": bool, - "allowSharedKeyAccess": bool, - "allowedCopyScope": "str", - "azureFilesIdentityBasedAuthentication": { - "directoryServiceOptions": "str", - "activeDirectoryProperties": { - "accountType": "str", - "azureStorageSid": "str", - "domainGuid": "str", - "domainName": "str", - "domainSid": "str", - "forestName": "str", - "netBiosDomainName": "str", - "samAccountName": "str", - }, - "defaultSharePermission": "str", - "smbOAuthSettings": {"isSmbOAuthEnabled": bool}, - }, - 
"customDomain": {"name": "str", "useSubDomainName": bool}, - "defaultToOAuthAuthentication": bool, - "dnsEndpointType": "str", - "dualStackEndpointPreference": {"publishIpv6Endpoint": bool}, - "enableExtendedGroups": bool, - "encryption": { - "identity": {"federatedIdentityClientId": "str", "userAssignedIdentity": "str"}, - "keySource": "Microsoft.Storage", - "keyvaultproperties": { - "currentVersionedKeyExpirationTimestamp": "2020-02-20 00:00:00", - "currentVersionedKeyIdentifier": "str", - "keyname": "str", - "keyvaulturi": "str", - "keyversion": "str", - "lastKeyRotationTimestamp": "2020-02-20 00:00:00", - }, - "requireInfrastructureEncryption": bool, - "services": { - "blob": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, - "file": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, - "queue": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, - "table": {"enabled": bool, "keyType": "str", "lastEnabledTime": "2020-02-20 00:00:00"}, - }, - }, - "geoPriorityReplicationStatus": {"isBlobEnabled": bool}, - "identity": { - "type": "str", - "principalId": "str", - "tenantId": "str", - "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, - }, - "immutableStorageWithVersioning": { - "enabled": bool, - "immutabilityPolicy": { - "allowProtectedAppendWrites": bool, - "immutabilityPeriodSinceCreationInDays": 0, - "state": "str", - }, - }, - "isLocalUserEnabled": bool, - "isSftpEnabled": bool, - "keyPolicy": {"keyExpirationPeriodInDays": 0}, - "kind": "str", - "largeFileSharesState": "str", - "minimumTlsVersion": "str", - "networkAcls": { - "defaultAction": "Allow", - "bypass": "AzureServices", - "ipRules": [{"value": "str", "action": "Allow"}], - "ipv6Rules": [{"value": "str", "action": "Allow"}], - "resourceAccessRules": [{"resourceId": "str", "tenantId": "str"}], - "virtualNetworkRules": [{"id": "str", "action": "Allow", "state": "str"}], - }, - "placement": 
{"zonePlacementPolicy": "str"}, - "publicNetworkAccess": "str", - "routingPreference": { - "publishInternetEndpoints": bool, - "publishMicrosoftEndpoints": bool, - "routingChoice": "str", - }, - "sasPolicy": {"expirationAction": "Log", "sasExpirationPeriod": "str"}, - "sku": {"name": "str", "tier": "str"}, - "supportsHttpsTrafficOnly": bool, - "tags": {"str": "str"}, - "zones": ["str"], - }, - api_version="2025-06-01", - ) - - # please add some check logic here by yourself - # ... - - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy_async - async def test_storage_accounts_list(self, resource_group): - response = self.client.storage_accounts.list( - api_version="2025-06-01", ) result = [r async for r in response] # please add some check logic here by yourself @@ -270,11 +291,8 @@ async def test_storage_accounts_list(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_storage_accounts_list_by_resource_group(self, resource_group): - response = self.client.storage_accounts.list_by_resource_group( - resource_group_name=resource_group.name, - api_version="2025-06-01", - ) + async def test_storage_accounts_list(self, resource_group): + response = self.client.storage_accounts.list() result = [r async for r in response] # please add some check logic here by yourself # ... 
@@ -285,7 +303,6 @@ async def test_storage_accounts_list_keys(self, resource_group): response = await self.client.storage_accounts.list_keys( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -298,7 +315,6 @@ async def test_storage_accounts_regenerate_key(self, resource_group): resource_group_name=resource_group.name, account_name="str", regenerate_key={"keyName": "str"}, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -320,7 +336,6 @@ async def test_storage_accounts_list_account_sas(self, resource_group): "signedProtocol": "str", "signedStart": "2020-02-20 00:00:00", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -352,7 +367,6 @@ async def test_storage_accounts_list_service_sas(self, resource_group): "startPk": "str", "startRk": "str", }, - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -365,7 +379,6 @@ async def test_storage_accounts_begin_failover(self, resource_group): await self.client.storage_accounts.begin_failover( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) ).result() # call '.result()' to poll until service return final result @@ -380,7 +393,6 @@ async def test_storage_accounts_begin_hierarchical_namespace_migration(self, res resource_group_name=resource_group.name, account_name="str", request_type="str", - api_version="2025-06-01", ) ).result() # call '.result()' to poll until service return final result @@ -394,7 +406,6 @@ async def test_storage_accounts_begin_abort_hierarchical_namespace_migration(sel await self.client.storage_accounts.begin_abort_hierarchical_namespace_migration( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) ).result() # call '.result()' to poll until service return final result @@ -409,34 +420,30 @@ async def 
test_storage_accounts_begin_customer_initiated_migration(self, resourc resource_group_name=resource_group.name, account_name="str", parameters={ - "targetSkuName": "str", + "properties": { + "targetSkuName": "str", + "migrationFailedDetailedReason": "str", + "migrationFailedReason": "str", + "migrationStatus": "str", + }, "id": "str", - "migrationFailedDetailedReason": "str", - "migrationFailedReason": "str", - "migrationStatus": "str", "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", ) ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself # ... - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy_async - async def test_storage_accounts_get_customer_initiated_migration(self, resource_group): - response = await self.client.storage_accounts.get_customer_initiated_migration( - resource_group_name=resource_group.name, - account_name="str", - migration_name="str", - api_version="2025-06-01", - ) - - # please add some check logic here by yourself - # ... 
- @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async async def test_storage_accounts_begin_restore_blob_ranges(self, resource_group): @@ -448,7 +455,6 @@ async def test_storage_accounts_begin_restore_blob_ranges(self, resource_group): "blobRanges": [{"endRange": "str", "startRange": "str"}], "timeToRestore": "2020-02-20 00:00:00", }, - api_version="2025-06-01", ) ).result() # call '.result()' to poll until service return final result @@ -461,7 +467,18 @@ async def test_storage_accounts_revoke_user_delegation_keys(self, resource_group response = await self.client.storage_accounts.revoke_user_delegation_keys( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_accounts_get_customer_initiated_migration(self, resource_group): + response = await self.client.storage_accounts.get_customer_initiated_migration( + resource_group_name=resource_group.name, + account_name="str", + migration_name="str", ) # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignment_instances_report_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignment_instances_report_operations.py index 9d48f2bb32cb..f2984cb6941e 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignment_instances_report_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignment_instances_report_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -25,7 +25,6 @@ def test_storage_task_assignment_instances_report_list(self, resource_group): resource_group_name=resource_group.name, account_name="str", storage_task_assignment_name="str", - api_version="2025-06-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignment_instances_report_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignment_instances_report_operations_async.py index b3beec0dec3f..913c77ead1f3 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignment_instances_report_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignment_instances_report_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -26,7 +26,6 @@ async def test_storage_task_assignment_instances_report_list(self, resource_grou resource_group_name=resource_group.name, account_name="str", storage_task_assignment_name="str", - api_version="2025-06-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_instances_report_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_instances_report_operations.py index 303df115d614..d13b5fd71dc1 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_instances_report_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_instances_report_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -24,7 +24,6 @@ def test_storage_task_assignments_instances_report_list(self, resource_group): response = self.client.storage_task_assignments_instances_report.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_instances_report_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_instances_report_operations_async.py index 724228698758..c4c8a69b3c56 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_instances_report_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_instances_report_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -25,7 +25,6 @@ async def test_storage_task_assignments_instances_report_list(self, resource_gro response = self.client.storage_task_assignments_instances_report.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_operations.py index 26200c8d668a..3229f03df33f 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -18,6 +18,18 @@ class TestStorageManagementStorageTaskAssignmentsOperations(AzureMgmtRecordedTes def setup_method(self, method): self.client = self.create_mgmt_client(StorageManagementClient) + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_task_assignments_get(self, resource_group): + response = self.client.storage_task_assignments.get( + resource_group_name=resource_group.name, + account_name="str", + storage_task_assignment_name="str", + ) + + # please add some check logic here by yourself + # ... + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_storage_task_assignments_begin_create(self, resource_group): @@ -26,6 +38,8 @@ def test_storage_task_assignments_begin_create(self, resource_group): account_name="str", storage_task_assignment_name="str", parameters={ + "id": "str", + "name": "str", "properties": { "description": "str", "enabled": bool, @@ -62,11 +76,16 @@ def test_storage_task_assignments_begin_create(self, resource_group): "taskVersion": "str", }, }, - "id": "str", - "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -117,25 +136,11 @@ def test_storage_task_assignments_begin_update(self, resource_group): "taskId": "str", } }, - api_version="2025-06-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself # ... 
- @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy - def test_storage_task_assignments_get(self, resource_group): - response = self.client.storage_task_assignments.get( - resource_group_name=resource_group.name, - account_name="str", - storage_task_assignment_name="str", - api_version="2025-06-01", - ) - - # please add some check logic here by yourself - # ... - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_storage_task_assignments_begin_delete(self, resource_group): @@ -143,7 +148,6 @@ def test_storage_task_assignments_begin_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", storage_task_assignment_name="str", - api_version="2025-06-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -155,8 +159,19 @@ def test_storage_task_assignments_list(self, resource_group): response = self.client.storage_task_assignments.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) result = [r for r in response] # please add some check logic here by yourself # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_task_assignments_begin_stop_assignment(self, resource_group): + response = self.client.storage_task_assignments.begin_stop_assignment( + resource_group_name=resource_group.name, + account_name="str", + storage_task_assignment_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_operations_async.py index f2650c012785..cace697fdb46 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_storage_task_assignments_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -19,6 +19,18 @@ class TestStorageManagementStorageTaskAssignmentsOperationsAsync(AzureMgmtRecord def setup_method(self, method): self.client = self.create_mgmt_client(StorageManagementClient, is_async=True) + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_task_assignments_get(self, resource_group): + response = await self.client.storage_task_assignments.get( + resource_group_name=resource_group.name, + account_name="str", + storage_task_assignment_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async async def test_storage_task_assignments_begin_create(self, resource_group): @@ -28,6 +40,8 @@ async def test_storage_task_assignments_begin_create(self, resource_group): account_name="str", storage_task_assignment_name="str", parameters={ + "id": "str", + "name": "str", "properties": { "description": "str", "enabled": bool, @@ -64,11 +78,16 @@ async def test_storage_task_assignments_begin_create(self, resource_group): "taskVersion": "str", }, }, - "id": "str", - "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", ) ).result() # call '.result()' to poll until service return final result @@ -121,26 +140,12 @@ async def test_storage_task_assignments_begin_update(self, resource_group): "taskId": "str", } }, - api_version="2025-06-01", ) ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself # ... - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy_async - async def test_storage_task_assignments_get(self, resource_group): - response = await self.client.storage_task_assignments.get( - resource_group_name=resource_group.name, - account_name="str", - storage_task_assignment_name="str", - api_version="2025-06-01", - ) - - # please add some check logic here by yourself - # ... 
- @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async async def test_storage_task_assignments_begin_delete(self, resource_group): @@ -149,7 +154,6 @@ async def test_storage_task_assignments_begin_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", storage_task_assignment_name="str", - api_version="2025-06-01", ) ).result() # call '.result()' to poll until service return final result @@ -162,8 +166,21 @@ async def test_storage_task_assignments_list(self, resource_group): response = self.client.storage_task_assignments.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) result = [r async for r in response] # please add some check logic here by yourself # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_task_assignments_begin_stop_assignment(self, resource_group): + response = await ( + await self.client.storage_task_assignments.begin_stop_assignment( + resource_group_name=resource_group.name, + account_name="str", + storage_task_assignment_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_operations.py index 5ac4bf14d166..18a6b89dd8fa 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. 
-# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -20,12 +20,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_table_create(self, resource_group): - response = self.client.table.create( + def test_table_get(self, resource_group): + response = self.client.table.get( resource_group_name=resource_group.name, account_name="str", table_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -33,12 +32,11 @@ def test_table_create(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_table_update(self, resource_group): - response = self.client.table.update( + def test_table_create(self, resource_group): + response = self.client.table.create( resource_group_name=resource_group.name, account_name="str", table_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -46,12 +44,11 @@ def test_table_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_table_get(self, resource_group): - response = self.client.table.get( + def test_table_update(self, resource_group): + response = self.client.table.update( resource_group_name=resource_group.name, account_name="str", table_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -64,7 +61,6 @@ def test_table_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", table_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -76,7 +72,6 @@ def test_table_list(self, resource_group): response = self.client.table.list( 
resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_operations_async.py index a5adb0ef355b..28983f06da79 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -21,12 +21,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_table_create(self, resource_group): - response = await self.client.table.create( + async def test_table_get(self, resource_group): + response = await self.client.table.get( resource_group_name=resource_group.name, account_name="str", table_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -34,12 +33,11 @@ async def test_table_create(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_table_update(self, resource_group): - response = await self.client.table.update( + async def test_table_create(self, resource_group): + response = await self.client.table.create( resource_group_name=resource_group.name, account_name="str", table_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -47,12 +45,11 @@ async def test_table_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_table_get(self, resource_group): - response = await self.client.table.get( + async def test_table_update(self, resource_group): + response = await self.client.table.update( resource_group_name=resource_group.name, account_name="str", table_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -65,7 +62,6 @@ async def test_table_delete(self, resource_group): resource_group_name=resource_group.name, account_name="str", table_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -77,7 +73,6 @@ async def test_table_list(self, resource_group): response = self.client.table.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) 
result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_services_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_services_operations.py index b096c2c5cefe..b12760a4a715 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_services_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -20,11 +20,10 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_table_services_list(self, resource_group): - response = self.client.table_services.list( + def test_table_services_get_service_properties(self, resource_group): + response = self.client.table_services.get_service_properties( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -37,23 +36,31 @@ def test_table_services_set_service_properties(self, resource_group): resource_group_name=resource_group.name, account_name="str", parameters={ - "cors": { - "corsRules": [ - { - "allowedHeaders": ["str"], - "allowedMethods": ["str"], - "allowedOrigins": ["str"], - "exposedHeaders": ["str"], - "maxAgeInSeconds": 0, - } - ] - }, "id": "str", "name": "str", + "properties": { + 
"cors": { + "corsRules": [ + { + "allowedHeaders": ["str"], + "allowedMethods": ["str"], + "allowedOrigins": ["str"], + "exposedHeaders": ["str"], + "maxAgeInSeconds": 0, + } + ] + } + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", - table_service_name="default", ) # please add some check logic here by yourself @@ -61,12 +68,10 @@ def test_table_services_set_service_properties(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_table_services_get_service_properties(self, resource_group): - response = self.client.table_services.get_service_properties( + def test_table_services_list(self, resource_group): + response = self.client.table_services.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", - table_service_name="default", ) # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_services_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_services_operations_async.py index c3613fe3793f..5d1aaa3d2754 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_services_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_table_services_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest @@ -21,11 +21,10 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_table_services_list(self, resource_group): - response = await self.client.table_services.list( + async def test_table_services_get_service_properties(self, resource_group): + response = await self.client.table_services.get_service_properties( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", ) # please add some check logic here by yourself @@ -38,23 +37,31 @@ async def test_table_services_set_service_properties(self, resource_group): resource_group_name=resource_group.name, account_name="str", parameters={ - "cors": { - "corsRules": [ - { - "allowedHeaders": ["str"], - "allowedMethods": ["str"], - "allowedOrigins": ["str"], - "exposedHeaders": ["str"], - "maxAgeInSeconds": 0, - } - ] - }, "id": "str", "name": "str", + "properties": { + "cors": { + "corsRules": [ + { + "allowedHeaders": ["str"], + "allowedMethods": ["str"], + "allowedOrigins": ["str"], + "exposedHeaders": ["str"], + "maxAgeInSeconds": 0, + } + ] + } + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "type": "str", }, - api_version="2025-06-01", - table_service_name="default", ) # please add some check logic here by yourself @@ -62,12 +69,10 @@ async def test_table_services_set_service_properties(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_table_services_get_service_properties(self, resource_group): - response = await self.client.table_services.get_service_properties( + async def 
test_table_services_list(self, resource_group): + response = await self.client.table_services.list( resource_group_name=resource_group.name, account_name="str", - api_version="2025-06-01", - table_service_name="default", ) # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_usages_operations.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_usages_operations.py index 50eb32c51372..44d7f45d9973 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_usages_operations.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_usages_operations.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -23,7 +23,6 @@ def setup_method(self, method): def test_usages_list_by_location(self, resource_group): response = self.client.usages.list_by_location( location="str", - api_version="2025-06-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_usages_operations_async.py b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_usages_operations_async.py index f4346a07fbfc..a59ccc275c30 100644 --- a/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_usages_operations_async.py +++ b/sdk/storage/azure-mgmt-storage/generated_tests/test_storage_management_usages_operations_async.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest @@ -24,7 +24,6 @@ def setup_method(self, method): async def test_usages_list_by_location(self, resource_group): response = self.client.usages.list_by_location( location="str", - api_version="2025-06-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/storage/azure-mgmt-storage/tests/disable_test_cli_mgmt_storage.py b/sdk/storage/azure-mgmt-storage/tests/disable_test_cli_mgmt_storage.py index 04a87f989b9d..1dbac679b08d 100644 --- a/sdk/storage/azure-mgmt-storage/tests/disable_test_cli_mgmt_storage.py +++ b/sdk/storage/azure-mgmt-storage/tests/disable_test_cli_mgmt_storage.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/storage/azure-mgmt-storage/tsp-location.yaml b/sdk/storage/azure-mgmt-storage/tsp-location.yaml new file mode 100644 index 000000000000..0a6c7f86ffc5 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/tsp-location.yaml @@ -0,0 +1,4 @@ +directory: specification/storage/Storage.Management +commit: 7368cdcd5be76911942ac8efd7f7abe060879b42 +repo: Azure/azure-rest-api-specs +additionalDirectories: