aztk.spark.models package¶
-
class
aztk.spark.models.
Application
(cloud_task: azure.batch.models.cloud_task.CloudTask)[source]¶ Bases:
object
-
class
aztk.spark.models.
ApplicationConfiguration
(name=None, application=None, application_args=None, main_class=None, jars=None, py_files=None, files=None, driver_java_options=None, driver_library_path=None, driver_class_path=None, driver_memory=None, executor_memory=None, driver_cores=None, executor_cores=None, max_retry_count=None)[source]¶ Bases:
object
-
class
aztk.spark.models.
ApplicationLog
(name: str, cluster_id: str, log: str, total_bytes: int, application_state: azure.batch.models.batch_service_client_enums.TaskState, exit_code: int)[source]¶ Bases:
object
-
class
aztk.spark.models.
Cluster
(pool: azure.batch.models.cloud_pool.CloudPool = None, nodes: azure.batch.models.compute_node_paged.ComputeNodePaged = None)[source]¶ Bases:
aztk.models.models.Cluster
-
class
aztk.spark.models.
ClusterConfiguration
(custom_scripts: List[aztk.spark.models.models.CustomScript] = None, file_shares: List[aztk.spark.models.models.FileShare] = None, cluster_id: str = None, vm_count=0, vm_low_pri_count=0, vm_size=None, subnet_id=None, toolkit: aztk.spark.models.models.SparkToolkit = None, user_configuration: aztk.spark.models.models.UserConfiguration = None, spark_configuration: aztk.spark.models.models.SparkConfiguration = None, worker_on_master: bool = None)[source]¶
-
class
aztk.spark.models.
File
(name: str, payload: _io.StringIO)[source]¶ Bases:
aztk.models.models.File
-
class
aztk.spark.models.
FileShare
[source]¶ Bases:
aztk.models.models.FileShare
-
class
aztk.spark.models.
Job
(cloud_job_schedule: azure.batch.models.cloud_job_schedule.CloudJobSchedule, cloud_tasks: List[azure.batch.models.cloud_task.CloudTask] = None, pool: azure.batch.models.cloud_pool.CloudPool = None, nodes: azure.batch.models.compute_node_paged.ComputeNodePaged = None)[source]¶ Bases:
object
-
class
aztk.spark.models.
JobConfiguration
(id, applications, vm_size, custom_scripts=None, spark_configuration=None, toolkit=None, max_dedicated_nodes=0, max_low_pri_nodes=0, subnet_id=None, worker_on_master=None)[source]¶ Bases:
object
-
class
aztk.spark.models.
JobState
[source]¶ Bases:
object
-
complete
= 'completed'¶
-
active
= 'active'¶
-
completed
= 'completed'¶
-
disabled
= 'disabled'¶
-
terminating
= 'terminating'¶
-
deleting
= 'deleting'¶
-
-
class
aztk.spark.models.
List
[source]¶ Bases:
list
, typing.MutableSequence
-
class
aztk.spark.models.
PluginConfiguration
(name: str, ports: List[aztk.models.plugins.plugin_configuration.PluginPort] = None, files: List[aztk.models.plugins.plugin_file.PluginFile] = None, execute: str = None, args=None, env=None, target_role: aztk.models.plugins.plugin_configuration.PluginTargetRole = None, target: aztk.models.plugins.plugin_configuration.PluginTarget = None)[source]¶ Bases:
aztk.models.plugins.plugin_configuration.PluginConfiguration
-
class
aztk.spark.models.
SecretsConfiguration
(service_principal=None, shared_key=None, docker=None, ssh_pub_key=None, ssh_priv_key=None)[source]¶
-
class
aztk.spark.models.
ServicePrincipalConfiguration
(tenant_id: str = None, client_id: str = None, credential: str = None, batch_account_resource_id: str = None, storage_account_resource_id: str = None)[source]¶
-
class
aztk.spark.models.
SparkConfiguration
(spark_defaults_conf=None, spark_env_sh=None, core_site_xml=None, jars=None)[source]¶ Bases:
object
-
class
aztk.spark.models.
SparkToolkit
(version: str, environment: str = None, environment_version: str = None)[source]¶ Bases:
aztk.models.toolkit.Toolkit
-
class
aztk.spark.models.
UserConfiguration
(username: str, ssh_key: str = None, password: str = None)[source]¶
-
class
aztk.spark.models.
VmImage
(publisher, offer, sku)[source]¶ Bases:
aztk.models.models.VmImage