Skip to content

Commit e46be59

Browse files
lianghe-databricks and andrewmchen
authored and committed
[SC-18790] Update Databricks CLI for Pool APIs (#250)
* init * fix the create/edit api path * fix format * more format * update the list pool CLI * fmt * init the tests, bugs inside * test format * fix tests * fix bug * tests passed * address comments * remove unused lib
1 parent 6cf5b4d commit e46be59

9 files changed

Lines changed: 465 additions & 2 deletions

File tree

databricks_cli/cli.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@
3636
from databricks_cli.secrets.cli import secrets_group
3737
from databricks_cli.stack.cli import stack_group
3838
from databricks_cli.groups.cli import groups_group
39+
from databricks_cli.instance_pools.cli import instance_pools_group
3940

4041

4142
@click.group(context_settings=CONTEXT_SETTINGS)
@@ -57,6 +58,7 @@ def cli():
5758
cli.add_command(secrets_group, name='secrets')
5859
cli.add_command(stack_group, name='stack')
5960
cli.add_command(groups_group, name='groups')
61+
cli.add_command(instance_pools_group, name="instance-pools")
6062

6163
if __name__ == "__main__":
6264
cli()

databricks_cli/click_types.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -68,6 +68,13 @@ class ClusterIdClickType(ParamType):
6868
'https://*.cloud.databricks.com/#/setting/clusters/$CLUSTER_ID/configuration.')
6969

7070

71+
class InstancePoolIdClickType(ParamType):
    """Click parameter type for an instance pool ID."""
    name = 'INSTANCE_POOL_ID'
    # NOTE: anchor is '#/setting/...' to match ClusterIdClickType's URL format above;
    # the original '#setting/...' was missing the slash after the hash.
    help = ('Can be found in the URL at '
            'https://*.cloud.databricks.com/#/setting/clusters/instance-pools/view/'
            '$INSTANCE_POOL_ID')
76+
77+
7178
class SecretScopeClickType(ParamType):
7279
name = 'SCOPE'
7380
help = 'The name of the secret scope.'
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
# Databricks CLI
2+
# Copyright 2017 Databricks, Inc.
3+
#
4+
# Licensed under the Apache License, Version 2.0 (the "License"), except
5+
# that the use of services to which certain application programming
6+
# interfaces (each, an "API") connect requires that the user first obtain
7+
# a license for the use of the APIs from Databricks, Inc. ("Databricks"),
8+
# by creating an account at www.databricks.com and agreeing to either (a)
9+
# the Community Edition Terms of Service, (b) the Databricks Terms of
10+
# Service, or (c) another written agreement between Licensee and Databricks
11+
# for the use of the APIs.
12+
#
13+
# You may not use this file except in compliance with the License.
14+
# You may obtain a copy of the License at
15+
#
16+
# http://www.apache.org/licenses/LICENSE-2.0
17+
#
18+
# Unless required by applicable law or agreed to in writing, software
19+
# distributed under the License is distributed on an "AS IS" BASIS,
20+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21+
# See the License for the specific language governing permissions and
22+
# limitations under the License.
Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
# Databricks CLI
2+
# Copyright 2017 Databricks, Inc.
3+
#
4+
# Licensed under the Apache License, Version 2.0 (the "License"), except
5+
# that the use of services to which certain application programming
6+
# interfaces (each, an "API") connect requires that the user first obtain
7+
# a license for the use of the APIs from Databricks, Inc. ("Databricks"),
8+
# by creating an account at www.databricks.com and agreeing to either (a)
9+
# the Community Edition Terms of Service, (b) the Databricks Terms of
10+
# Service, or (c) another written agreement between Licensee and Databricks
11+
# for the use of the APIs.
12+
#
13+
# You may not use this file except in compliance with the License.
14+
# You may obtain a copy of the License at
15+
#
16+
# http://www.apache.org/licenses/LICENSE-2.0
17+
#
18+
# Unless required by applicable law or agreed to in writing, software
19+
# distributed under the License is distributed on an "AS IS" BASIS,
20+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21+
# See the License for the specific language governing permissions and
22+
# limitations under the License.
23+
from databricks_cli.sdk import InstancePoolService
24+
25+
26+
class InstancePoolsApi(object):
    """Client-side wrapper around the Databricks instance-pool endpoints."""

    def __init__(self, api_client):
        # Attribute name `client` is part of the public surface; keep it.
        self.client = InstancePoolService(api_client)

    def create_instance_pool(self, json):
        # POST the raw JSON body straight through the underlying API client:
        # InstancePoolService.create_instance_pool only accepts exploded keyword
        # fields, not a pre-built request dict.
        return self.client.client.perform_query('POST', '/instance-pools/create', data=json)

    def edit_instance_pool(self, json):
        # Same rationale as create: send the raw body via the low-level client.
        return self.client.client.perform_query('POST', '/instance-pools/edit', data=json)

    def delete_instance_pool(self, instance_pool_id):
        # Delegates to the SDK service, which wraps the id into the request body.
        return self.client.delete_instance_pool(instance_pool_id)

    def get_instance_pool(self, instance_pool_id):
        # Delegates to the SDK service, which wraps the id into the request body.
        return self.client.get_instance_pool(instance_pool_id)

    def list_instance_pools(self):
        # No parameters; returns the raw JSON listing from the service.
        return self.client.list_instance_pools()
Lines changed: 164 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,164 @@
1+
# Databricks CLI
2+
# Copyright 2017 Databricks, Inc.
3+
#
4+
# Licensed under the Apache License, Version 2.0 (the "License"), except
5+
# that the use of services to which certain application programming
6+
# interfaces (each, an "API") connect requires that the user first obtain
7+
# a license for the use of the APIs from Databricks, Inc. ("Databricks"),
8+
# by creating an account at www.databricks.com and agreeing to either (a)
9+
# the Community Edition Terms of Service, (b) the Databricks Terms of
10+
# Service, or (c) another written agreement between Licensee and Databricks
11+
# for the use of the APIs.
12+
#
13+
# You may not use this file except in compliance with the License.
14+
# You may obtain a copy of the License at
15+
#
16+
# http://www.apache.org/licenses/LICENSE-2.0
17+
#
18+
# Unless required by applicable law or agreed to in writing, software
19+
# distributed under the License is distributed on an "AS IS" BASIS,
20+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21+
# See the License for the specific language governing permissions and
22+
# limitations under the License.
23+
24+
import click
25+
from tabulate import tabulate
26+
27+
from databricks_cli.click_types import OutputClickType, JsonClickType, InstancePoolIdClickType
28+
from databricks_cli.instance_pools.api import InstancePoolsApi
29+
from databricks_cli.utils import eat_exceptions, CONTEXT_SETTINGS, pretty_format, json_cli_base, \
30+
truncate_string
31+
from databricks_cli.configure.config import provide_api_client, profile_option, debug_option
32+
from databricks_cli.version import print_version_callback, version
33+
34+
35+
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--json-file', default=None, type=click.Path(),
              help='File containing JSON request to POST to /api/2.0/instance-pools/create.')
@click.option('--json', default=None, type=JsonClickType(),
              help=JsonClickType.help('/api/2.0/instance-pools/create'))
@debug_option
@profile_option
@eat_exceptions
@provide_api_client
def create_cli(api_client, json_file, json):
    """
    Creates a Databricks instance pool.

    The specification for the request json can be found at
    https://docs.databricks.com/api/latest/instance-pools.html#create
    """
    # Named inner function instead of a lambda; json_cli_base resolves which of
    # --json-file / --json was supplied and passes the parsed payload here.
    def call_create(request_json):
        return InstancePoolsApi(api_client).create_instance_pool(request_json)

    json_cli_base(json_file, json, call_create)
53+
54+
55+
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--json-file', default=None, type=click.Path(),
              help='File containing JSON request to POST to /api/2.0/instance-pools/edit.')
@click.option('--json', default=None, type=JsonClickType(),
              help=JsonClickType.help('/api/2.0/instance-pools/edit'))
@debug_option
@profile_option
@eat_exceptions
@provide_api_client
def edit_cli(api_client, json_file, json):
    """
    Edits a Databricks instance pool.

    The specification for the request json can be found at
    https://docs.databricks.com/api/latest/instance-pools.html#edit
    """
    # Exactly one of the two JSON sources must be supplied.
    # NOTE(review): json_cli_base may perform the same validation — confirm.
    if not bool(json_file) ^ bool(json):
        raise RuntimeError('Either --json-file or --json should be provided')

    def call_edit(request_json):
        return InstancePoolsApi(api_client).edit_instance_pool(request_json)

    # The edit endpoint's response is not printed (print_response=False).
    json_cli_base(json_file, json, call_edit, print_response=False)
76+
77+
78+
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--instance-pool-id', required=True, type=InstancePoolIdClickType(),
              help=InstancePoolIdClickType.help)
@debug_option
@profile_option
@eat_exceptions
@provide_api_client
def delete_cli(api_client, instance_pool_id):
    """
    Deletes a Databricks instance pool given its ID.

    This permanently deletes the instance pool. The idle instances in the pool are terminated
    asynchronously. New clusters cannot attach to the pool. Running clusters attached to the pool
    continue to run but cannot auto-scale up. Terminated clusters attached to the pool will fail to
    start until they are edited to no longer use the pool.
    """
    # Fire-and-forget: the delete endpoint's response is intentionally not echoed.
    pools_api = InstancePoolsApi(api_client)
    pools_api.delete_instance_pool(instance_pool_id)
95+
96+
97+
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--instance-pool-id', required=True, type=InstancePoolIdClickType(),
              help=InstancePoolIdClickType.help)
@debug_option
@profile_option
@eat_exceptions
@provide_api_client
def get_cli(api_client, instance_pool_id):
    """
    Retrieves metadata about an instance pool.
    """
    # Pretty-print the raw JSON response from /instance-pools/get.
    pool_info = InstancePoolsApi(api_client).get_instance_pool(instance_pool_id)
    click.echo(pretty_format(pool_info))
109+
110+
111+
def _instance_pools_to_table(instance_pools_json):
    """Flatten the /instance-pools/list JSON response into rows for tabulate.

    Each row is [id, truncated name, idle, used, pending idle, pending used],
    matching the column order of the headers in list_cli.
    """
    stats_headers = ['idle_count', 'used_count', 'pending_idle_count', 'pending_used_count']
    ret = []
    for pool in instance_pools_json.get('instance_pools', []):
        # A fresh row list is built per pool, so no defensive copy is needed
        # (the original appended pool_stats[:] with a comment claiming the list
        # was re-used across iterations — it never was).
        row = [pool['instance_pool_id'], truncate_string(pool['instance_pool_name'])]
        row.extend(pool['stats'][header] for header in stats_headers)
        ret.append(row)
    return ret
123+
124+
125+
@click.command(context_settings=CONTEXT_SETTINGS,
               short_help='Lists active and recently terminated instance pools.')
@click.option('--output', default=None, help=OutputClickType.help, type=OutputClickType())
@debug_option
@profile_option
@eat_exceptions
@provide_api_client
def list_cli(api_client, output):
    """
    Lists active instance pools with the stats of the pools.
    """
    pools_json = InstancePoolsApi(api_client).list_instance_pools()
    # Default (non-JSON) output is a plain-text table; --output JSON dumps raw JSON.
    if not OutputClickType.is_json(output):
        headers = ['ID', 'NAME', 'IDLE INSTANCES', 'USED INSTANCES', 'PENDING IDLE INSTANCES',
                   'PENDING USED INSTANCES']
        table = _instance_pools_to_table(pools_json)
        click.echo(tabulate(table, headers=headers, tablefmt='plain', numalign='left'))
    else:
        click.echo(pretty_format(pools_json))
144+
145+
146+
@click.group(context_settings=CONTEXT_SETTINGS,
             short_help='Utility to interact with Databricks instance pools.')
@click.option('--version', '-v', is_flag=True, callback=print_version_callback,
              expose_value=False, is_eager=True, help=version)
@debug_option
@profile_option
@eat_exceptions
def instance_pools_group():
    """
    Utility to interact with Databricks instance pools.
    """
    pass


# Register all subcommands on the group in one place.
for _name, _command in [('create', create_cli),
                        ('edit', edit_cli),
                        ('delete', delete_cli),
                        ('get', get_cli),
                        ('list', list_cli)]:
    instance_pools_group.add_command(_command, name=_name)

databricks_cli/sdk/service.py

Lines changed: 84 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -706,3 +706,87 @@ def get_groups_for_principal(self, user_name=None, group_name=None, headers=None
706706
_data['group_name'] = group_name
707707
return self.client.perform_query('GET', '/groups/list-parents', data=_data, headers=headers)
708708

709+
710+
class InstancePoolService(object):
    """Thin wrapper around the /instance-pools REST endpoints.

    Each method packs only the non-None arguments into the request body and
    delegates to ``client.perform_query``.
    """

    def __init__(self, client):
        # `client` is the low-level API client exposing perform_query().
        self.client = client

    @staticmethod
    def _pool_conf_data(instance_pool_name, min_idle_instances, max_capacity, aws_attributes,
                        node_type_id, custom_tags, idle_instance_autotermination_minutes,
                        enable_elastic_disk, disk_spec, preloaded_spark_versions):
        """Pack the optional pool-configuration fields shared by create and edit.

        Returns a dict containing only the fields that were supplied (not None).
        Raises TypeError if aws_attributes or disk_spec is supplied but is not a
        dict. (The original duplicated this ~30-line packing/validation in both
        create_instance_pool and edit_instance_pool; validation now happens
        before the value is stored.)
        """
        _data = {}
        if instance_pool_name is not None:
            _data['instance_pool_name'] = instance_pool_name
        if min_idle_instances is not None:
            _data['min_idle_instances'] = min_idle_instances
        if max_capacity is not None:
            _data['max_capacity'] = max_capacity
        if aws_attributes is not None:
            if not isinstance(aws_attributes, dict):
                raise TypeError('Expected databricks.InstancePoolAwsAttributes() or dict for '
                                'field aws_attributes')
            _data['aws_attributes'] = aws_attributes
        if node_type_id is not None:
            _data['node_type_id'] = node_type_id
        if custom_tags is not None:
            _data['custom_tags'] = custom_tags
        if idle_instance_autotermination_minutes is not None:
            _data['idle_instance_autotermination_minutes'] = idle_instance_autotermination_minutes
        if enable_elastic_disk is not None:
            _data['enable_elastic_disk'] = enable_elastic_disk
        if disk_spec is not None:
            if not isinstance(disk_spec, dict):
                raise TypeError('Expected databricks.DiskSpec() or dict for field disk_spec')
            _data['disk_spec'] = disk_spec
        if preloaded_spark_versions is not None:
            _data['preloaded_spark_versions'] = preloaded_spark_versions
        return _data

    def create_instance_pool(self, instance_pool_name=None, min_idle_instances=None,
                             max_capacity=None, aws_attributes=None, node_type_id=None,
                             custom_tags=None, idle_instance_autotermination_minutes=None,
                             enable_elastic_disk=None, disk_spec=None,
                             preloaded_spark_versions=None, headers=None):
        """Create an instance pool; returns the service's JSON response."""
        _data = self._pool_conf_data(instance_pool_name, min_idle_instances, max_capacity,
                                     aws_attributes, node_type_id, custom_tags,
                                     idle_instance_autotermination_minutes, enable_elastic_disk,
                                     disk_spec, preloaded_spark_versions)
        return self.client.perform_query('POST', '/instance-pools/create', data=_data,
                                         headers=headers)

    def delete_instance_pool(self, instance_pool_id=None, headers=None):
        """Delete the pool with the given ID."""
        _data = {}
        if instance_pool_id is not None:
            _data['instance_pool_id'] = instance_pool_id
        return self.client.perform_query('POST', '/instance-pools/delete', data=_data,
                                         headers=headers)

    def edit_instance_pool(self, instance_pool_id, instance_pool_name=None,
                           min_idle_instances=None, max_capacity=None, aws_attributes=None,
                           node_type_id=None, custom_tags=None,
                           idle_instance_autotermination_minutes=None, enable_elastic_disk=None,
                           disk_spec=None, preloaded_spark_versions=None, headers=None):
        """Edit an existing pool; same optional fields as create plus the pool ID."""
        _data = self._pool_conf_data(instance_pool_name, min_idle_instances, max_capacity,
                                     aws_attributes, node_type_id, custom_tags,
                                     idle_instance_autotermination_minutes, enable_elastic_disk,
                                     disk_spec, preloaded_spark_versions)
        if instance_pool_id is not None:
            _data['instance_pool_id'] = instance_pool_id
        return self.client.perform_query('POST', '/instance-pools/edit', data=_data,
                                         headers=headers)

    def get_instance_pool(self, instance_pool_id=None, headers=None):
        """Fetch metadata for the pool with the given ID."""
        _data = {}
        if instance_pool_id is not None:
            _data['instance_pool_id'] = instance_pool_id
        return self.client.perform_query('GET', '/instance-pools/get', data=_data,
                                         headers=headers)

    def list_instance_pools(self, headers=None):
        """List all instance pools; takes no request parameters."""
        return self.client.perform_query('GET', '/instance-pools/list', data={},
                                         headers=headers)

databricks_cli/utils.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ def pretty_format(json):
6666
return json_dumps(json, indent=2)
6767

6868

69-
def json_cli_base(json_file, json, api):
69+
def json_cli_base(json_file, json, api, print_response=True):
7070
"""
7171
Takes json_file or json string and calls an function "api" with the json
7272
deserialized
@@ -77,7 +77,8 @@ def json_cli_base(json_file, json, api):
7777
with open(json_file, 'r') as f:
7878
json = f.read()
7979
res = api(json_loads(json))
80-
click.echo(pretty_format(res))
80+
if print_response:
81+
click.echo(pretty_format(res))
8182

8283

8384
def truncate_string(s, length=100):

tests/instance_pools/__init__.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
# Databricks CLI
2+
# Copyright 2017 Databricks, Inc.
3+
#
4+
# Licensed under the Apache License, Version 2.0 (the "License"), except
5+
# that the use of services to which certain application programming
6+
# interfaces (each, an "API") connect requires that the user first obtain
7+
# a license for the use of the APIs from Databricks, Inc. ("Databricks"),
8+
# by creating an account at www.databricks.com and agreeing to either (a)
9+
# the Community Edition Terms of Service, (b) the Databricks Terms of
10+
# Service, or (c) another written agreement between Licensee and Databricks
11+
# for the use of the APIs.
12+
#
13+
# You may not use this file except in compliance with the License.
14+
# You may obtain a copy of the License at
15+
#
16+
# http://www.apache.org/licenses/LICENSE-2.0
17+
#
18+
# Unless required by applicable law or agreed to in writing, software
19+
# distributed under the License is distributed on an "AS IS" BASIS,
20+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21+
# See the License for the specific language governing permissions and
22+
# limitations under the License.

0 commit comments

Comments
 (0)