|
| 1 | +#!/usr/bin/env python |
| 2 | +# -*- coding: utf-8 -*- |
| 3 | +# |
| 4 | +# Copyright (c) 2021-2024 The WfCommons Team. |
| 5 | +# |
| 6 | +# This program is free software: you can redistribute it and/or modify |
| 7 | +# it under the terms of the GNU General Public License as published by |
| 8 | +# the Free Software Foundation, either version 3 of the License, or |
| 9 | +# (at your option) any later version. |
| 10 | + |
| 11 | +import pathlib |
| 12 | +import re |
| 13 | +import ast |
| 14 | +import json |
| 15 | + |
| 16 | +from logging import Logger |
| 17 | +from typing import Optional, Union |
| 18 | + |
| 19 | +from .abstract_translator import Translator |
| 20 | +from ...common import Workflow |
| 21 | + |
class AirflowTranslator(Translator):
    """
    A WfFormat parser for creating Airflow workflow applications.

    :param workflow: Workflow benchmark object or path to the workflow benchmark JSON instance.
    :type workflow: Union[Workflow, pathlib.Path]
    :param logger: The logger where to log information/warning or errors (optional).
    :type logger: Logger
    """

    def __init__(self,
                 workflow: Union[Workflow, pathlib.Path],
                 logger: Optional[Logger] = None) -> None:
        """Create an object of the translator."""
        super().__init__(workflow, logger)

        # Preamble of the generated Airflow DAG file (workflow.py). The
        # doubled braces ({{ }}) are f-string escapes and come out as single
        # braces in the generated script. translate() appends one
        # BashOperator definition per task, then the dependency lines.
        self.script = f"""
from __future__ import annotations

import os
from datetime import datetime
from airflow.models.dag import DAG
from airflow.operators.bash import BashOperator

with DAG(
    "{self.workflow.name}",
    description="airflow translation of a wfcommons instance",
    schedule="0 0 * * *",
    start_date=datetime(2021, 1, 1),
    catchup=False,
    tags=["wfcommons"],
) as dag:
"""

    def translate(self, output_folder: pathlib.Path) -> None:
        """
        Translate a workflow benchmark description (WfFormat) into an Airflow workflow application.

        :param output_folder: The name of the output folder.
        :type output_folder: pathlib.Path
        """

        # Build the fully-escaped bash_command string for every task up front
        # (populates self.task_commands).
        self._prep_commands(output_folder)

        # Emit one BashOperator per task. The operator body is indented four
        # spaces so it sits inside the generated "with DAG(...) as dag:" block.
        # NOTE(review): task.task_id is used verbatim as a Python variable name
        # in the generated script -- assumes task IDs are valid Python
        # identifiers; confirm against the benchmark generator.
        for task in self.tasks.values():
            self.script += f"""
    {task.task_id} = BashOperator(
        task_id="{task.task_id}",
        depends_on_past=False,
        bash_command='{self.task_commands[task.task_id]}',
        env={{"AIRFLOW_HOME": os.environ["AIRFLOW_HOME"]}},
        retries=3,
    )
"""
        # Emit the dependency edges: [parent1, parent2, ...] >> child.
        # Tasks with no parents produce no line.
        for task in self.tasks.values():
            parents = ", ".join(self.task_parents[task.task_id])
            if parents:
                self.script += f"""
    [{parents}] >> {task.task_id}
"""
        # write benchmark files
        # NOTE(review): mkdir raises FileExistsError if output_folder already
        # exists (no exist_ok=True) -- presumably deliberate, to avoid
        # clobbering a previous translation; confirm intent.
        output_folder.mkdir(parents=True)
        with open(output_folder.joinpath("workflow.py"), "w") as fp:
            fp.write(self.script)

        # additional files
        self._copy_binary_files(output_folder)
        self._generate_input_files(output_folder)

    def _prep_commands(self, output_folder: pathlib.Path) -> None:
        """
        Prepares the bash_command strings for the BashOperators.

        :param output_folder: The name of the output folder.
        :type output_folder: pathlib.Path
        """
        # Maps task_id -> fully escaped shell command string, consumed by
        # translate() when emitting each BashOperator.
        self.task_commands = {}

        for task in self.tasks.values():
            program = task.program
            args = []
            for a in task.args:
                if "--output-files" in a:
                    # Rewrite every output file name to a path under
                    # ${AIRFLOW_HOME}/dags/<benchmark>/data/ so the command
                    # resolves from the Airflow DAGs folder at runtime.
                    # ast.literal_eval parses the dict literal that follows
                    # the flag; json.dumps re-serializes it with double quotes.
                    flag, output_files_dict = a.split(" ", 1)
                    output_files_dict = {str(f"${{AIRFLOW_HOME}}/dags/{output_folder.name}/data/{key}"): value for key, value in ast.literal_eval(output_files_dict).items()}
                    a = f"{flag} {json.dumps(output_files_dict)}"
                elif "--input-files" in a:
                    # Same ${AIRFLOW_HOME}-relative rewrite for the input
                    # file list literal.
                    flag, input_files_arr = a.split(" ", 1)
                    input_files_arr = [str(f"${{AIRFLOW_HOME}}/dags/{output_folder.name}/data/{file}") for file in ast.literal_eval(input_files_arr)]
                    a = f"{flag} {json.dumps(input_files_arr)}"
                else:
                    # Normalize single quotes to double quotes so the escaping
                    # pass below only has to handle one quote character.
                    a = a.replace("'", "\"")
                args.append(a)

            command_str = " ".join([str(program)] + args)

            # Escapes all double quotes
            # Each '"' becomes backslash-backslash-quote (\\"). One escaping
            # level is consumed when the generated workflow.py is parsed by
            # Python (bash_command is a single-quoted literal there), leaving
            # \" for bash to unescape inside the quoted JSON argument.
            command_str = command_str.replace('"', '\\\\"')

            # Wraps --output-files and --input-files arguments in double quotes
            # so bash passes each JSON blob as a single argv entry.
            # NOTE(review): the pattern assumes --output-files appears before
            # --input-files, each followed by its literal with single spaces --
            # confirm the benchmark always emits the args in this order.
            command_str = re.sub(
                r'(--output-files) (\{.*\}) (--input-files) (\[.*?\])',
                lambda m: f'{m.group(1)} "{m.group(2)}" {m.group(3)} "{m.group(4)}"',
                command_str
            )

            self.task_commands[task.task_id] = command_str
0 commit comments