-
Notifications
You must be signed in to change notification settings - Fork 129
Expand file tree
/
Copy pathtask.py
More file actions
100 lines (75 loc) · 3.32 KB
/
task.py
File metadata and controls
100 lines (75 loc) · 3.32 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
from abc import ABC
from pathlib import Path
from typing import Optional
from pydantic.v1 import root_validator, validator
from dbx.constants import TASKS_SUPPORTED_IN_EXECUTE
from dbx.models.cli.execute import ExecuteParametersPayload
from dbx.models.validators import at_least_one_of, only_one_provided
from dbx.models.workflow.common.flexible import FlexibleModel
from dbx.models.workflow.common.parameters import ParamPair, StringArray
from dbx.models.workflow.common.task_type import TaskType
from dbx.utils import dbx_echo
class BaseNotebookTask(FlexibleModel, ABC):
    """Abstract base for notebook tasks: a notebook path plus optional base parameters."""

    notebook_path: str
    # key/value pairs passed to the notebook as widgets; optional
    base_parameters: Optional[ParamPair]
class SparkJarTask(FlexibleModel):
    """JAR-based task definition: a main class plus optional parameter lists."""

    main_class_name: str
    parameters: Optional[StringArray]
    jar_params: Optional[StringArray]
    # legacy field kept only for backward compatibility; see validator below
    jar_uri: Optional[str]

    @validator("jar_uri")
    def _deprecated_msg(cls, value):  # noqa
        """Warn that jar_uri is deprecated; the value is passed through unchanged."""
        warning_text = (
            "[yellow bold] Field jar_uri is DEPRECATED since 04/2016. "
            "Provide a [code]jar[/code] through the [code]libraries[/code] field instead."
        )
        dbx_echo(warning_text)
        return value
class SparkPythonTask(FlexibleModel):
    """Python-file task definition: an entrypoint script plus optional CLI parameters."""

    python_file: str
    parameters: Optional[StringArray] = []

    @validator("python_file")
    def _not_fuse(cls, value):  # noqa
        """Reject FUSE-based references and anything that is not a .py file."""
        if value.startswith("file:fuse://"):
            raise ValueError("The python_file property cannot be FUSE-based")
        if not value.endswith(".py"):
            raise ValueError(f"Only a .py file can be used in this property, provided: {value}")
        return value

    @property
    def execute_file(self) -> Path:
        """Resolve the local path for execute mode; requires a file:// reference to an existing file."""
        if not self.python_file.startswith("file://"):
            raise ValueError("File for execute mode should be located locally and referenced via file:// prefix.")
        local_path = Path(self.python_file).relative_to("file://")
        if local_path.exists():
            return local_path
        raise ValueError(f"Provided file doesn't exist {local_path}")
class SparkSubmitTask(FlexibleModel):
    """spark-submit task definition; at least one of the two parameter fields must be set."""

    parameters: Optional[StringArray]
    spark_submit_params: Optional[StringArray]

    @root_validator(allow_reuse=True)
    def _validate_provided(cls, values):  # noqa
        """Ensure at least one of parameters / spark_submit_params is provided."""
        return at_least_one_of(["parameters", "spark_submit_params"], values)
class BasePipelineTask(FlexibleModel, ABC):
    """Abstract base for DLT pipeline tasks, identified solely by the pipeline id."""

    pipeline_id: str
class BaseTaskMixin(FlexibleModel):
    """Mixin providing task-type introspection and execute-mode helpers for task models."""

    # exactly one task section (e.g. notebook_task, spark_python_task) may be provided
    _only_one_provided = root_validator(pre=True, allow_reuse=True)(
        lambda _, values: only_one_provided("_task", values)
    )

    @property
    def task_type(self) -> TaskType:
        """Return the task type inferred from whichever task section is populated.

        Falls back to :attr:`TaskType.undefined_task` when no section matches.
        """
        # Hoisted out of the loop: the original serialized the whole model via
        # self.dict() once per TaskType member; one serialization is enough.
        _payload = self.dict()
        for _type in TaskType:
            if _payload.get(_type):
                return TaskType(_type)
        return TaskType.undefined_task

    def check_if_supported_in_execute(self):
        """Raise RuntimeError if this task type cannot be used with execute mode."""
        if self.task_type not in TASKS_SUPPORTED_IN_EXECUTE:
            raise RuntimeError(
                f"Provided task type {self.task_type} is not supported in execute mode. "
                f"Supported types are: {TASKS_SUPPORTED_IN_EXECUTE}"
            )

    def override_execute_parameters(self, payload: ExecuteParametersPayload):
        """Apply execute-mode parameter overrides onto the underlying task model.

        :raises ValueError: when named_parameters are given for a spark_python_task.
        """
        if payload.named_parameters and self.task_type == TaskType.spark_python_task:
            raise ValueError(
                "`named_parameters` are not supported by spark_python_task. Please use `parameters` instead."
            )
        pointer = getattr(self, self.task_type)
        # NOTE: direct __dict__ update deliberately bypasses pydantic validation
        pointer.__dict__.update(payload.dict(exclude_none=True))