diff --git a/sdk/batch/azure-batch/samples/batch_automatic_scaling.py b/sdk/batch/azure-batch/samples/batch_automatic_scaling.py new file mode 100644 index 000000000000..f15e33a8455c --- /dev/null +++ b/sdk/batch/azure-batch/samples/batch_automatic_scaling.py @@ -0,0 +1,50 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +"""Snippets extracted from articles/batch/batch-automatic-scaling.md. +""" + +import datetime + +from azure.batch import BatchClient, models + + +def autoscale_create_and_enable(batch_client: BatchClient, pool_id: str) -> None: + # [START autoscale_pool_create_enable_python] + # Create a pool; specify configuration + new_pool = models.BatchPoolCreateOptions( + id=pool_id, + virtual_machine_configuration=models.VirtualMachineConfiguration( + image_reference=models.BatchVmImageReference( + publisher="Canonical", + offer="UbuntuServer", + sku="20.04-LTS", + version="latest", + ), + node_agent_sku_id="batch.node.ubuntu 20.04", + ), + vm_size="STANDARD_D1_v2", + target_dedicated_nodes=0, + target_low_priority_nodes=0, + ) + batch_client.create_pool(pool=new_pool) # Add the pool to the service client + + formula = ( + "$curTime = time();\n" + "$workHours = $curTime.hour >= 8 && $curTime.hour < 18;\n" + "$isWeekday = $curTime.weekday >= 1 && $curTime.weekday <= 5;\n" + "$isWorkingWeekdayHour = $workHours && $isWeekday;\n" + "$TargetDedicated = $isWorkingWeekdayHour ? 
20:10;" + ) + + # Enable autoscale; specify the formula + enable_options = models.BatchPoolEnableAutoScaleOptions( + auto_scale_formula=formula, + auto_scale_evaluation_interval=datetime.timedelta(minutes=10), + ) + batch_client.enable_pool_auto_scale(pool_id=pool_id, enable_auto_scale_options=enable_options) + # [END autoscale_pool_create_enable_python] diff --git a/sdk/batch/azure-batch/samples/batch_docker_container_workloads.py b/sdk/batch/azure-batch/samples/batch_docker_container_workloads.py new file mode 100644 index 000000000000..271475a4d0ce --- /dev/null +++ b/sdk/batch/azure-batch/samples/batch_docker_container_workloads.py @@ -0,0 +1,114 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +# Snippets extracted from articles/batch/batch-docker-container-workloads.md (Python only). + + +from azure.batch import models + + +def make_pool_no_prefetch(pool_id: str): + # [START docker_pool_no_prefetch_python] + image_ref_to_use = models.BatchVmImageReference( + publisher='microsoft-dsvm', + offer='ubuntu-hpc', + sku='2204', + version='latest') + + # Specify container configuration. This is required even though there are no prefetched images. 
+ container_conf = models.BatchContainerConfiguration(type='dockerCompatible') + + new_pool = models.BatchPoolCreateOptions( + id=pool_id, + virtual_machine_configuration=models.VirtualMachineConfiguration( + image_reference=image_ref_to_use, + container_configuration=container_conf, + node_agent_sku_id='batch.node.ubuntu 22.04'), + vm_size='STANDARD_D2S_V3', + target_dedicated_nodes=1) + # [END docker_pool_no_prefetch_python] + return new_pool + + +def make_pool_dockerhub_prefetch(pool_id: str): + # [START docker_pool_dockerhub_prefetch_python] + image_ref_to_use = models.BatchVmImageReference( + publisher='microsoft-dsvm', + offer='ubuntu-hpc', + sku='2204', + version='latest') + + # Specify container configuration, fetching the official Ubuntu container image from Docker Hub. + container_conf = models.BatchContainerConfiguration( + type='dockerCompatible', + container_image_names=['ubuntu']) + + new_pool = models.BatchPoolCreateOptions( + id=pool_id, + virtual_machine_configuration=models.VirtualMachineConfiguration( + image_reference=image_ref_to_use, + container_configuration=container_conf, + node_agent_sku_id='batch.node.ubuntu 22.04'), + vm_size='STANDARD_D2S_V3', + target_dedicated_nodes=1) + # [END docker_pool_dockerhub_prefetch_python] + return new_pool + + +def make_pool_acr_prefetch(): + # [START docker_pool_acr_prefetch_python] + image_ref_to_use = models.BatchVmImageReference( + publisher='microsoft-dsvm', + offer='ubuntu-hpc', + sku='2204', + version='latest') + + # Specify a container registry + subscription_id = "yyyy-yyy-yyy-yyy-yyy" + resource_group_name = "TestRG" + user_assigned_identity_name = "testUMI" + resource_id = ( + f"/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}" + f"/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{user_assigned_identity_name}" + ) + + container_registry = models.ContainerRegistryReference( + registry_server="myRegistry.azurecr.io", + 
identity_reference=models.BatchNodeIdentityReference(resource_id=resource_id)) + + # Create container configuration, prefetching Docker images from the container registry + container_conf = models.BatchContainerConfiguration( + type='dockerCompatible', + container_image_names=["myRegistry.azurecr.io/samples/myImage"], + container_registries=[container_registry]) + + new_pool = models.BatchPoolCreateOptions( + id="myPool", + virtual_machine_configuration=models.VirtualMachineConfiguration( + image_reference=image_ref_to_use, + container_configuration=container_conf, + node_agent_sku_id='batch.node.ubuntu 22.04'), + vm_size='STANDARD_D2S_V3', + target_dedicated_nodes=1) + # [END docker_pool_acr_prefetch_python] + return new_pool + + +def make_container_task(): + # [START docker_container_task_python] + task_id = 'sampletask' + task_container_settings = models.BatchTaskContainerSettings( + image_name='myimage', + container_run_options='--rm --workdir /') + task = models.BatchTaskCreateOptions( + id=task_id, + command_line='/bin/sh -c "echo \'hello world\' > $AZ_BATCH_TASK_WORKING_DIR/output.txt"', + container_settings=task_container_settings + ) + # [END docker_container_task_python] + return task diff --git a/sdk/batch/azure-batch/samples/batch_linux_nodes.py b/sdk/batch/azure-batch/samples/batch_linux_nodes.py new file mode 100644 index 000000000000..07133faac0b1 --- /dev/null +++ b/sdk/batch/azure-batch/samples/batch_linux_nodes.py @@ -0,0 +1,135 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +# Snippets extracted from articles/batch/batch-linux-nodes.md (Python only). 
+ +import datetime +import getpass + +from azure.batch import BatchClient, models +from azure.core.credentials import AzureNamedKeyCredential + + +def create_pool_explicit_image(): + # [START linux_nodes_pool_create_python] + # Specify Batch account credentials + account = "" + key = "" + account_endpoint = "" + + # Pool settings + pool_id = "LinuxNodesSamplePoolPython" + vm_size = "STANDARD_D2_V3" + node_count = 1 + + # Initialize the Batch client + creds = AzureNamedKeyCredential(account, key) + client = BatchClient(endpoint=account_endpoint, credential=creds) + + # Configure the start task for the pool + start_task = models.BatchStartTask( + command_line="printenv AZ_BATCH_NODE_STARTUP_DIR", + user_identity=models.UserIdentity( + auto_user=models.AutoUserSpecification( + elevation_level=models.ElevationLevel.ADMIN, + scope=models.AutoUserScope.POOL, + ) + ), + ) + + # Create an ImageReference which specifies the Marketplace + # virtual machine image to install on the nodes + ir = models.BatchVmImageReference( + publisher="canonical", + offer="0001-com-ubuntu-server-focal", + sku="20_04-lts", + version="latest") + + # Create the VirtualMachineConfiguration, specifying + # the VM image reference and the Batch node agent + # to install on the node + vmc = models.VirtualMachineConfiguration( + image_reference=ir, + node_agent_sku_id="batch.node.ubuntu 20.04") + + # Create the unbound pool + new_pool = models.BatchPoolCreateOptions( + id=pool_id, + vm_size=vm_size, + target_dedicated_nodes=node_count, + virtual_machine_configuration=vmc, + start_task=start_task, + ) + + # Create pool in the Batch service + client.create_pool(pool=new_pool) + # [END linux_nodes_pool_create_python] + return client + + +def vm_config_from_supported_images(client: BatchClient): + # [START linux_nodes_image_reference_python] + # Get the list of supported images from the Batch service + images = list(client.list_supported_images()) + + # Obtain the desired image reference + image = None + 
for img in images: + if (img.image_reference.publisher and img.image_reference.publisher.lower() == "canonical" and + img.image_reference.offer and img.image_reference.offer.lower() == "0001-com-ubuntu-server-focal" and + img.image_reference.sku and img.image_reference.sku.lower() == "20_04-lts"): + image = img + break + + if image is None: + raise RuntimeError('invalid image reference for desired configuration') + + # Create the VirtualMachineConfiguration, specifying the VM image + # reference and the Batch node agent to be installed on the node + vmc = models.VirtualMachineConfiguration( + image_reference=image.image_reference, + node_agent_sku_id=image.node_agent_sku_id) + # [END linux_nodes_image_reference_python] + return vmc + + +def ssh_create_user_demo_inputs(): + # [START linux_nodes_ssh_user_python] + # Specify your own account credentials + batch_account_name = '' + batch_account_key = '' + batch_account_url = '' + + # Specify the ID of an existing pool containing Linux nodes + # currently in the 'idle' state + pool_id = '' + + # Specify the username and prompt for a password + username = 'linuxuser' + password = getpass.getpass() + + # Create a BatchClient + creds = AzureNamedKeyCredential(batch_account_name, batch_account_key) + batch_client = BatchClient(endpoint=batch_account_url, credential=creds) + + # Create the user that will be added to each node in the pool + expiry = datetime.datetime.utcnow() + datetime.timedelta(days=30) + user = models.BatchNodeUserCreateOptions( + name=username, + password=password, + is_admin=True, + expiry_time=expiry, + ) + + # Get the list of nodes in the pool and add the user to each + nodes = batch_client.list_nodes(pool_id=pool_id) + for node in nodes: + batch_client.create_node_user(pool_id=pool_id, node_id=node.id, user=user) + login_settings = batch_client.get_node_remote_login_settings(pool_id=pool_id, node_id=node.id) + print(f"{node.id}: ssh {username}@{login_settings.remote_login_ip_address} -p 
{login_settings.remote_login_port}") + # [END linux_nodes_ssh_user_python] diff --git a/sdk/batch/azure-batch/samples/batch_sig_images.py b/sdk/batch/azure-batch/samples/batch_sig_images.py new file mode 100644 index 000000000000..d24dd5b6fb7e --- /dev/null +++ b/sdk/batch/azure-batch/samples/batch_sig_images.py @@ -0,0 +1,69 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +# Snippets extracted from articles/batch/batch-sig-images.md. + +from azure.batch import BatchClient, models +from azure.identity import DefaultAzureCredential + + +def create_sig_pool(): + # [START sig_create_pool_python] + # Specify Batch account credentials + account_endpoint = "https://{batch-account-name}.{region}.batch.azure.com" + + # Pool settings + pool_id = "LinuxNodesSamplePoolPython" + vm_size = "STANDARD_D2_V3" + node_count = 1 + + # Initialize the Batch client with Microsoft Entra ID authentication + client = BatchClient(endpoint=account_endpoint, credential=DefaultAzureCredential()) + + # Configure the start task for the pool + start_task = models.BatchStartTask( + command_line="printenv AZ_BATCH_NODE_STARTUP_DIR", + user_identity=models.UserIdentity( + auto_user=models.AutoUserSpecification( + elevation_level=models.ElevationLevel.ADMIN, + scope=models.AutoUserScope.POOL, + ) + ), + ) + + # Create an image reference that points to an Azure Compute Gallery image. 
+ ir = models.BatchVmImageReference( + virtual_machine_image_id=( + "/subscriptions/{sub id}/resourceGroups/{resource group name}" + "/providers/Microsoft.Compute/galleries/{gallery name}" + "/images/{image definition name}/versions/{version id}" + ) + ) + + # Create the VirtualMachineConfiguration + vmc = models.VirtualMachineConfiguration( + image_reference=ir, + node_agent_sku_id="batch.node.ubuntu 22.04", + ) + + # Create the unbound pool + new_pool = models.BatchPoolCreateOptions( + id=pool_id, + vm_size=vm_size, + target_dedicated_nodes=node_count, + virtual_machine_configuration=vmc, + start_task=start_task, + ) + + # Create pool in the Batch service + client.create_pool(pool=new_pool) + # [END sig_create_pool_python] + + +if __name__ == "__main__": + create_sig_pool() diff --git a/sdk/batch/azure-batch/samples/batch_user_accounts.py b/sdk/batch/azure-batch/samples/batch_user_accounts.py new file mode 100644 index 000000000000..30bfd4eaa418 --- /dev/null +++ b/sdk/batch/azure-batch/samples/batch_user_accounts.py @@ -0,0 +1,49 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +# Snippets extracted from articles/batch/batch-user-accounts.md (Python only). 
+ +from azure.batch import BatchClient, models + + +def add_admin_autouser_task(batch_client: BatchClient, jobid: str) -> None: + # [START user_accounts_admin_autouser_python] + user = models.UserIdentity( + auto_user=models.AutoUserSpecification( + elevation_level=models.ElevationLevel.ADMIN, + scope=models.AutoUserScope.TASK)) + task = models.BatchTaskCreateOptions( + id='task_1', + command_line='cmd /c "echo hello world"', + user_identity=user) + batch_client.create_task(job_id=jobid, task=task) + # [END user_accounts_admin_autouser_python] + + +def make_named_user_pool(pool_id: str, image_ref_to_use, sku_to_use, vm_size: str, vm_count: int): + # [START user_accounts_pool_python] + users = [ + models.UserAccount( + name='pool-admin', + password='', + elevation_level=models.ElevationLevel.ADMIN), + models.UserAccount( + name='pool-nonadmin', + password='', + elevation_level=models.ElevationLevel.NON_ADMIN), + ] + pool = models.BatchPoolCreateOptions( + id=pool_id, + user_accounts=users, + virtual_machine_configuration=models.VirtualMachineConfiguration( + image_reference=image_ref_to_use, + node_agent_sku_id=sku_to_use), + vm_size=vm_size, + target_dedicated_nodes=vm_count) + # [END user_accounts_pool_python] + return pool diff --git a/sdk/batch/azure-batch/samples/create_pool_ephemeral_os_disk.py b/sdk/batch/azure-batch/samples/create_pool_ephemeral_os_disk.py new file mode 100644 index 000000000000..6870f90de11f --- /dev/null +++ b/sdk/batch/azure-batch/samples/create_pool_ephemeral_os_disk.py @@ -0,0 +1,26 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +# Snippets extracted from articles/batch/create-pool-ephemeral-os-disk.md. 
+ +from azure.batch import models + + +def make_vm_configuration(image_ref_to_use, node_sku_id): + # [START ephemeral_os_disk_vm_config] + virtual_machine_configuration = models.VirtualMachineConfiguration( + image_reference=image_ref_to_use, + node_agent_sku_id=node_sku_id, + os_disk=models.BatchOsDisk( + ephemeral_os_disk_settings=models.BatchDiffDiskSettings( + placement=models.DiffDiskPlacement.CACHE_DISK + ) + ) + ) + # [END ephemeral_os_disk_vm_config] + return virtual_machine_configuration diff --git a/sdk/batch/azure-batch/samples/pool_endpoint_configuration.py b/sdk/batch/azure-batch/samples/pool_endpoint_configuration.py new file mode 100644 index 000000000000..cf1477c91b78 --- /dev/null +++ b/sdk/batch/azure-batch/samples/pool_endpoint_configuration.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +# Snippets extracted from articles/batch/pool-endpoint-configuration.md (Python only). 
+ +from azure.batch import models + + +class AzureBatchAllow: + def set_ports_pool(self, pool, **kwargs): + # [START endpoint_config_allow_subnet_python] + pool.network_configuration = models.NetworkConfiguration( + endpoint_configuration=models.BatchPoolEndpointConfiguration( + inbound_nat_pools=[models.BatchInboundNatPool( + name='SSH', + protocol=models.InboundEndpointProtocol.TCP, + backend_port=22, + frontend_port_range_start=4000, + frontend_port_range_end=4100, + network_security_group_rules=[ + models.NetworkSecurityGroupRule( + priority=170, + access=models.NetworkSecurityGroupRuleAccess.ALLOW, + source_address_prefix='192.168.1.0/24' + ), + models.NetworkSecurityGroupRule( + priority=175, + access=models.NetworkSecurityGroupRuleAccess.DENY, + source_address_prefix='*' + ) + ] + ) + ] + ) + ) + # [END endpoint_config_allow_subnet_python] + + +class AzureBatchDeny: + def set_ports_pool(self, pool, **kwargs): + # [START endpoint_config_deny_ssh_python] + pool.network_configuration = models.NetworkConfiguration( + endpoint_configuration=models.BatchPoolEndpointConfiguration( + inbound_nat_pools=[models.BatchInboundNatPool( + name='SSH', + protocol=models.InboundEndpointProtocol.TCP, + backend_port=22, + frontend_port_range_start=4000, + frontend_port_range_end=4100, + network_security_group_rules=[ + models.NetworkSecurityGroupRule( + priority=170, + access=models.NetworkSecurityGroupRuleAccess.DENY, + source_address_prefix='Internet' + ) + ] + ) + ] + ) + ) + # [END endpoint_config_deny_ssh_python] diff --git a/sdk/batch/azure-batch/samples/quick_run_python.py b/sdk/batch/azure-batch/samples/quick_run_python.py new file mode 100644 index 000000000000..922163e4855d --- /dev/null +++ b/sdk/batch/azure-batch/samples/quick_run_python.py @@ -0,0 +1,118 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +# Snippets extracted from articles/batch/quick-run-python.md. + +import os +import sys + +from azure.batch import BatchClient, models +from azure.core.credentials import AzureNamedKeyCredential +from azure.storage.blob import BlobServiceClient + + +def make_blob_service_client(config): +    # [START quickrun_python_blob_client] +    blob_service_client = BlobServiceClient( +        account_url=f"https://{config.STORAGE_ACCOUNT_NAME}.{config.STORAGE_ACCOUNT_DOMAIN}/", +        credential=config.STORAGE_ACCOUNT_KEY +    ) +    # [END quickrun_python_blob_client] +    return blob_service_client + + +def upload_inputs(blob_service_client, input_container_name, upload_file_to_container): +    # [START quickrun_python_upload_inputs] +    input_file_paths = [os.path.join(sys.path[0], 'taskdata0.txt'), +                        os.path.join(sys.path[0], 'taskdata1.txt'), +                        os.path.join(sys.path[0], 'taskdata2.txt')] + +    input_files = [ +        upload_file_to_container(blob_service_client, input_container_name, file_path) +        for file_path in input_file_paths] +    # [END quickrun_python_upload_inputs] +    return input_files + + +def make_batch_client(config): +    # [START quickrun_python_batch_client] +    credentials = AzureNamedKeyCredential(config.BATCH_ACCOUNT_NAME, +                                          config.BATCH_ACCOUNT_KEY) + +    batch_client = BatchClient( +        endpoint=config.BATCH_ACCOUNT_URL, +        credential=credentials) +    # [END quickrun_python_batch_client] +    return batch_client + + +def create_pool(batch_client: BatchClient, pool_id: str, config): +    # [START quickrun_python_create_pool] +    new_pool = models.BatchPoolCreateOptions( +        id=pool_id, +        virtual_machine_configuration=models.VirtualMachineConfiguration( +            image_reference=models.BatchVmImageReference( +                publisher="canonical", +                offer="0001-com-ubuntu-server-jammy", +                sku="22_04-lts", +                version="latest" +            ), +            node_agent_sku_id="batch.node.ubuntu 22.04"), + 
vm_size=config.POOL_VM_SIZE, + target_dedicated_nodes=config.POOL_NODE_COUNT + ) + batch_client.create_pool(pool=new_pool) + # [END quickrun_python_create_pool] + + +def create_job(batch_client: BatchClient, job_id: str, pool_id: str): + # [START quickrun_python_create_job] + job = models.BatchJobCreateOptions( + id=job_id, + pool_info=models.BatchPoolInfo(pool_id=pool_id)) + + batch_client.create_job(job=job) + # [END quickrun_python_create_job] + + +def add_tasks(batch_client: BatchClient, job_id: str, resource_input_files): + # [START quickrun_python_add_tasks] + tasks = [] + + for idx, input_file in enumerate(resource_input_files): + command = f"/bin/bash -c \"cat {input_file.file_path}\"" + tasks.append(models.BatchTaskCreateOptions( + id=f'Task{idx}', + command_line=command, + resource_files=[input_file] + ) + ) + + batch_client.create_tasks(job_id=job_id, task_collection=tasks) + # [END quickrun_python_add_tasks] + + +def view_task_output(batch_client: BatchClient, job_id: str, config, _read_stream_as_string, text_encoding): + # [START quickrun_python_view_output] + tasks = batch_client.list_tasks(job_id=job_id) + + for task in tasks: + + node_info = batch_client.get_task(job_id=job_id, task_id=task.id).node_info + node_id = node_info.node_id if node_info else None + print(f"Task: {task.id}") + print(f"Node: {node_id}") + + stream = batch_client.download_task_file( + job_id=job_id, task_id=task.id, file_path=config.STANDARD_OUT_FILE_NAME) + + file_text = _read_stream_as_string( + stream, + text_encoding) + print(file_text) + # [END quickrun_python_view_output] diff --git a/sdk/batch/azure-batch/samples/tutorial_parallel_python.py b/sdk/batch/azure-batch/samples/tutorial_parallel_python.py new file mode 100644 index 000000000000..b89f6c27646d --- /dev/null +++ b/sdk/batch/azure-batch/samples/tutorial_parallel_python.py @@ -0,0 +1,152 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft 
Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +# Snippets extracted from articles/batch/tutorial-parallel-python.md. + +import datetime +import os +import shlex +import sys +import time + +from azure.batch import BatchClient, models +from azure.core.credentials import AzureNamedKeyCredential +from azure.storage.blob import BlobServiceClient + + +# [START tutorial_parallel_config] +_BATCH_ACCOUNT_NAME = os.getenv('AZURE_BATCH_ACCOUNT_NAME', '') +_BATCH_ACCOUNT_KEY = os.getenv('AZURE_BATCH_ACCOUNT_KEY', '') +_BATCH_ACCOUNT_URL = os.getenv('AZURE_BATCH_ACCOUNT_URL', 'https://yourbatchaccount.yourbatchregion.batch.azure.com') +_STORAGE_ACCOUNT_NAME = os.getenv('AZURE_STORAGE_ACCOUNT_NAME', '') +_STORAGE_ACCOUNT_KEY = os.getenv('AZURE_STORAGE_ACCOUNT_KEY', '') +# [END tutorial_parallel_config] + + +def make_blob_client(): + # [START tutorial_parallel_blob_client] + blob_service_client = BlobServiceClient( + account_url=f"https://{_STORAGE_ACCOUNT_NAME}.blob.core.windows.net/", + credential=_STORAGE_ACCOUNT_KEY) + # [END tutorial_parallel_blob_client] + return blob_service_client + + +def make_batch_client(): + # [START tutorial_parallel_batch_client] + credentials = AzureNamedKeyCredential(_BATCH_ACCOUNT_NAME, + _BATCH_ACCOUNT_KEY) + + batch_client = BatchClient( + endpoint=_BATCH_ACCOUNT_URL, + credential=credentials) + # [END tutorial_parallel_batch_client] + return batch_client + + +def upload_inputs(blob_service_client, input_container_name, output_container_name, upload_file_to_container): + # [START tutorial_parallel_upload_inputs] + blob_service_client.create_container(input_container_name) + blob_service_client.create_container(output_container_name) + input_file_paths = [] + + for folder, subs, files in os.walk(os.path.join(sys.path[0], './InputFiles/')): + for filename in files: + if 
filename.endswith(".mp4"): + input_file_paths.append(os.path.abspath( + os.path.join(folder, filename))) + + # Upload the input files. This is the collection of files that are to be processed by the tasks. + input_files = [ + upload_file_to_container(blob_service_client, input_container_name, file_path) + for file_path in input_file_paths] + # [END tutorial_parallel_upload_inputs] + return input_files + + +def create_pool(batch_client: BatchClient, pool_id: str, pool_vm_size: str, + dedicated_node_count: int, low_priority_node_count: int): + # [START tutorial_parallel_create_pool] + new_pool = models.BatchPoolCreateOptions( + id=pool_id, + virtual_machine_configuration=models.VirtualMachineConfiguration( + image_reference=models.BatchVmImageReference( + publisher="Canonical", + offer="UbuntuServer", + sku="20.04-LTS", + version="latest" + ), + node_agent_sku_id="batch.node.ubuntu 20.04"), + vm_size=pool_vm_size, + target_dedicated_nodes=dedicated_node_count, + target_low_priority_nodes=low_priority_node_count, + start_task=models.BatchStartTask( + command_line="/bin/bash -c \"apt-get update && apt-get install -y ffmpeg\"", + wait_for_success=True, + user_identity=models.UserIdentity( + auto_user=models.AutoUserSpecification( + scope=models.AutoUserScope.POOL, + elevation_level=models.ElevationLevel.ADMIN)), + ) + ) + batch_client.create_pool(pool=new_pool) + # [END tutorial_parallel_create_pool] + + +def create_job(batch_client: BatchClient, job_id: str, pool_id: str): + # [START tutorial_parallel_create_job] + job = models.BatchJobCreateOptions( + id=job_id, + pool_info=models.BatchPoolInfo(pool_id=pool_id)) + + batch_client.create_job(job=job) + # [END tutorial_parallel_create_job] + + +def add_tasks(batch_client: BatchClient, job_id: str, input_files, output_container_sas_url): + # [START tutorial_parallel_add_tasks] + tasks = list() + + for idx, input_file in enumerate(input_files): + input_file_path = input_file.file_path + output_file_path = 
"".join((input_file_path).split('.')[:-1]) + '.mp3' + command = "/bin/bash -c \"ffmpeg -i {} {}\"".format( + shlex.quote(input_file_path), shlex.quote(output_file_path)) + tasks.append(models.BatchTaskCreateOptions( + id='Task{}'.format(idx), + command_line=command, + resource_files=[input_file], + output_files=[models.OutputFile( + file_pattern=output_file_path, + destination=models.OutputFileDestination( + container=models.OutputFileBlobContainerDestination( + container_url=output_container_sas_url)), + upload_options=models.OutputFileUploadConfiguration( + upload_condition=models.OutputFileUploadCondition.TASK_SUCCESS))] + ) + ) + batch_client.create_tasks(job_id=job_id, task_collection=tasks) + # [END tutorial_parallel_add_tasks] + + +def wait_for_tasks(batch_client: BatchClient, job_id: str, timeout_expiration): + # [START tutorial_parallel_wait_tasks] + while datetime.datetime.now() < timeout_expiration: + print('.', end='') + sys.stdout.flush() + tasks = batch_client.list_tasks(job_id=job_id) + + incomplete_tasks = [task for task in tasks if + task.state != models.BatchTaskState.COMPLETED] + if not incomplete_tasks: + print() + return True + else: + time.sleep(1) + return False + # [END tutorial_parallel_wait_tasks]