Skip to content

Commit f0d1003

Browse files
malthe authored and andrewmchen committed
Add 'init_scripts' optional argument (#266)
This fixes issue #264. Note that the 'init_scripts' argument was added previously to 'edit_cluster', but (inadvertently?) removed later. This change adds it to both 'create_cluster' and 'edit_cluster'.
1 parent aad9f95 commit f0d1003

1 file changed

Lines changed: 10 additions & 6 deletions

File tree

databricks_cli/sdk/service.py

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -215,9 +215,9 @@ def list_clusters(self, headers=None):
215215
def create_cluster(self, num_workers=None, autoscale=None, cluster_name=None, spark_version=None,
216216
spark_conf=None, aws_attributes=None, node_type_id=None,
217217
driver_node_type_id=None, ssh_public_keys=None, custom_tags=None,
218-
cluster_log_conf=None, spark_env_vars=None, autotermination_minutes=None,
219-
enable_elastic_disk=None, cluster_source=None, instance_pool_id=None,
220-
headers=None):
218+
cluster_log_conf=None, init_scripts=None, spark_env_vars=None,
219+
autotermination_minutes=None, enable_elastic_disk=None, cluster_source=None,
220+
instance_pool_id=None, headers=None):
221221
_data = {}
222222
if num_workers is not None:
223223
_data['num_workers'] = num_workers
@@ -247,6 +247,8 @@ def create_cluster(self, num_workers=None, autoscale=None, cluster_name=None, sp
247247
_data['cluster_log_conf'] = cluster_log_conf
248248
if not isinstance(cluster_log_conf, dict):
249249
raise TypeError('Expected databricks.ClusterLogConf() or dict for field cluster_log_conf')
250+
if init_scripts is not None:
251+
_data['init_scripts'] = init_scripts
250252
if spark_env_vars is not None:
251253
_data['spark_env_vars'] = spark_env_vars
252254
if autotermination_minutes is not None:
@@ -303,9 +305,9 @@ def resize_cluster(self, cluster_id, num_workers=None, autoscale=None, headers=N
303305
def edit_cluster(self, cluster_id, num_workers=None, autoscale=None, cluster_name=None,
304306
spark_version=None, spark_conf=None, aws_attributes=None, node_type_id=None,
305307
driver_node_type_id=None, ssh_public_keys=None, custom_tags=None,
306-
cluster_log_conf=None, spark_env_vars=None, autotermination_minutes=None,
307-
enable_elastic_disk=None, cluster_source=None, instance_pool_id=None,
308-
headers=None):
308+
cluster_log_conf=None, init_scripts=None, spark_env_vars=None,
309+
autotermination_minutes=None, enable_elastic_disk=None, cluster_source=None,
310+
instance_pool_id=None, headers=None):
309311
_data = {}
310312
if cluster_id is not None:
311313
_data['cluster_id'] = cluster_id
@@ -337,6 +339,8 @@ def edit_cluster(self, cluster_id, num_workers=None, autoscale=None, cluster_nam
337339
_data['cluster_log_conf'] = cluster_log_conf
338340
if not isinstance(cluster_log_conf, dict):
339341
raise TypeError('Expected databricks.ClusterLogConf() or dict for field cluster_log_conf')
342+
if init_scripts is not None:
343+
_data['init_scripts'] = init_scripts
340344
if spark_env_vars is not None:
341345
_data['spark_env_vars'] = spark_env_vars
342346
if autotermination_minutes is not None:

0 commit comments

Comments (0)