Skip to content

Commit a4e3dc7

Browse files
author
Tom Softreck
committed
update
1 parent 5c35112 commit a4e3dc7

3 files changed

Lines changed: 136 additions & 150 deletions

File tree

dsl/Makefile

Lines changed: 67 additions & 148 deletions
Original file line numberDiff line numberDiff line change
@@ -247,95 +247,13 @@ parallel-run:
247247

248248
## Run basic flow example
249249
example-basic:
250-
@echo -e "Uruchamianie podstawowego przykładu przepływu..."
251-
mkdir -p examples
252-
cat > examples/basic_flow.py << 'EOL'
253-
from flow_dsl import task, run_flow_from_dsl
254-
255-
@task(name="Zadanie 1")
256-
def task1(input_data):
257-
print("Wykonuję zadanie 1")
258-
return {"result": input_data + " - przetworzony przez zadanie 1"}
259-
260-
@task(name="Zadanie 2")
261-
def task2(input_data):
262-
print("Wykonuję zadanie 2")
263-
return {"result": input_data["result"] + " - przetworzony przez zadanie 2"}
264-
265-
@task(name="Zadanie 3")
266-
def task3(input_data):
267-
print("Wykonuję zadanie 3")
268-
return {"result": input_data["result"] + " - przetworzony przez zadanie 3"}
269-
270-
# Definicja przepływu DSL
271-
flow_dsl = """
272-
flow SimpleFlow:
273-
description: "Prosty przepływ sekwencyjny"
274-
task1 -> task2
275-
task2 -> task3
276-
"""
277-
278-
# Uruchomienie przepływu
279-
results = run_flow_from_dsl(flow_dsl, "Dane wejściowe")
280-
print("\nWyniki przepływu:")
281-
print(results)
282-
EOL
283-
python examples/basic_flow.py
250+
@echo -e "Running basic flow example..."
251+
python examples/basic_flow.py --mock
284252

285253
## Run email processing example
286254
example-email:
287-
@echo -e "Uruchamianie przykładu przetwarzania emaili..."
288-
mkdir -p examples
289-
cat > examples/email_flow.py << 'EOL'
290-
from flow_dsl import task, run_flow_from_dsl
291-
292-
@task(name="Pobieranie emaili")
293-
def fetch_emails(server, username, password):
294-
print(f"Pobieranie emaili z {server} jako {username}")
295-
# Symulacja pobierania emaili
296-
emails = [
297-
{"id": "1", "subject": "Pytanie o wsparcie", "urgent": True},
298-
{"id": "2", "subject": "Newsletter", "urgent": False},
299-
{"id": "3", "subject": "Pilna sprawa", "urgent": True}
300-
]
301-
return emails
302-
303-
@task(name="Klasyfikacja emaili")
304-
def classify_emails(emails):
305-
print(f"Klasyfikacja {len(emails)} emaili")
306-
urgent = [email for email in emails if email.get("urgent", False)]
307-
regular = [email for email in emails if not email.get("urgent", False)]
308-
return {"urgent_emails": urgent, "regular_emails": regular}
309-
310-
@task(name="Przetwarzanie pilnych emaili")
311-
def process_urgent_emails(urgent_emails):
312-
print(f"Przetwarzanie {len(urgent_emails)} pilnych emaili")
313-
return [f"Odpowiedź na pilny email {email['id']}" for email in urgent_emails]
314-
315-
@task(name="Przetwarzanie zwykłych emaili")
316-
def process_regular_emails(regular_emails):
317-
print(f"Przetwarzanie {len(regular_emails)} zwykłych emaili")
318-
return [f"Odpowiedź na zwykły email {email['id']}" for email in regular_emails]
319-
320-
# Definicja przepływu DSL
321-
email_dsl = """
322-
flow EmailProcessing:
323-
description: "Przetwarzanie e-maili"
324-
fetch_emails -> classify_emails
325-
classify_emails -> process_urgent_emails
326-
classify_emails -> process_regular_emails
327-
"""
328-
329-
# Uruchomienie przepływu
330-
results = run_flow_from_dsl(email_dsl, {
331-
"server": "imap.example.com",
332-
"username": "info@example.com",
333-
"password": "password123"
334-
})
335-
print("\nWyniki przepływu:")
336-
print(results)
337-
EOL
338-
python examples/email_flow.py
255+
@echo -e "Running email processing example..."
256+
python examples/email_flow.py --mock
339257

340258
## Run data processing example
341259
example-data:
@@ -350,69 +268,70 @@ id,name,value
350268
5,Product B,250
351269
EOL
352270
cat > examples/data_flow.py << 'EOL'
353-
from flow_dsl import task, run_flow_from_dsl
354-
import pandas as pd
355-
import os
356-
357-
@task(name="Wczytanie CSV")
358-
def load_csv(file_path):
359-
print(f"Wczytywanie danych z {file_path}")
360-
df = pd.read_csv(file_path)
361-
return {"dataframe": df}
362-
363-
@task(name="Czyszczenie danych")
364-
def clean_data(input_data):
365-
df = input_data["dataframe"]
366-
print(f"Czyszczenie danych, początkowy rozmiar: {len(df)}")
367-
# Usunięcie duplikatów
368-
df = df.drop_duplicates()
369-
print(f"Rozmiar po usunięciu duplikatów: {len(df)}")
370-
return {"dataframe": df}
371-
372-
@task(name="Transformacja danych")
373-
def transform_data(input_data):
374-
df = input_data["dataframe"]
375-
print(f"Transformacja danych")
376-
# Przykładowa transformacja - konwersja kolumny na wielkie litery
377-
if "name" in df.columns:
378-
df["name"] = df["name"].str.upper()
379-
# Dodanie nowej kolumny
380-
df["tax"] = df["value"] * 0.23
381-
return {"dataframe": df}
382-
383-
@task(name="Zapisanie wyników")
384-
def save_results(input_data, output_path):
385-
df = input_data["dataframe"]
386-
print(f"Zapisywanie wyników do {output_path}")
387-
df.to_csv(output_path, index=False)
388-
return {"rows_count": len(df), "output_path": output_path}
389-
390-
# Definicja przepływu DSL
391-
flow_dsl = """
392-
flow CSVProcessing:
393-
description: "Przetwarzanie pliku CSV"
394-
load_csv -> clean_data
395-
clean_data -> transform_data
396-
transform_data -> save_results
397-
"""
398-
399-
# Upewnienie się, że katalog wyjściowy istnieje
400-
os.makedirs("examples/data/output", exist_ok=True)
401-
402-
# Uruchomienie przepływu
403-
results = run_flow_from_dsl(flow_dsl, {
404-
"file_path": "examples/data/sample.csv",
405-
"output_path": "examples/data/output/processed.csv"
406-
})
407-
print("\nWyniki przepływu:")
408-
print(results)
409-
410-
# Wyświetlenie przetworzonego pliku
411-
print("\nZawartość przetworzonego pliku:")
412-
processed_df = pd.read_csv("examples/data/output/processed.csv")
413-
print(processed_df)
271+
from taskinity.core.taskinity_core import task, run_flow_from_dsl
272+
import pandas as pd
273+
import os
274+
275+
@task(name="Wczytanie CSV")
276+
def load_csv(file_path):
277+
print(f"Wczytywanie danych z {file_path}")
278+
df = pd.read_csv(file_path)
279+
return {"dataframe": df}
280+
281+
@task(name="Czyszczenie danych")
282+
def clean_data(input_data):
283+
df = input_data["dataframe"]
284+
print(f"Czyszczenie danych, początkowy rozmiar: {len(df)}")
285+
# Usunięcie duplikatów
286+
df = df.drop_duplicates()
287+
print(f"Rozmiar po czyszczeniu: {len(df)}")
288+
return {"dataframe": df}
289+
290+
@task(name="Transformacja danych")
291+
def transform_data(input_data):
292+
df = input_data["dataframe"]
293+
print(f"Transformacja danych")
294+
# Grupowanie po nazwie produktu
295+
grouped = df.groupby('name').agg({'value': ['sum', 'mean', 'count']})
296+
grouped.columns = ['total_value', 'avg_value', 'count']
297+
grouped = grouped.reset_index()
298+
return {"dataframe": grouped}
299+
300+
@task(name="Zapisanie wyników")
301+
def save_results(input_data, output_path):
302+
df = input_data["dataframe"]
303+
print(f"Zapisywanie wyników do {output_path}")
304+
df.to_csv(output_path, index=False)
305+
return {"rows": len(df), "columns": len(df.columns), "path": output_path}
306+
307+
flow_dsl = """
308+
flow DataProcessing:
309+
description: "Przepływ przetwarzania danych CSV"
310+
load_csv -> clean_data
311+
clean_data -> transform_data
312+
transform_data -> save_results
313+
"""
314+
315+
# Upewnienie się, że katalog wyjściowy istnieje
316+
os.makedirs("examples/data/output", exist_ok=True)
317+
318+
# Uruchomienie przepływu
319+
results = run_flow_from_dsl(flow_dsl, {
320+
"file_path": "examples/data/sample.csv",
321+
"output_path": "examples/data/output/processed.csv"
322+
})
323+
print("\nWyniki przepływu:")
324+
print(f"Przetworzone rekordy: {results['save_results']['rows']}")
325+
print(f"Zapisany plik: {results['save_results']['path']}")
326+
print(results)
327+
328+
# Wyświetlenie przetworzonego pliku
329+
print("\nZawartość przetworzonego pliku:")
330+
processed_df = pd.read_csv("examples/data/output/processed.csv")
331+
print(processed_df)
414332
EOL
415-
python examples/data_flow.py
333+
python examples/data_flow.py --mock
334+
416335

417336
## Run API integration example
418337
example-api:

dsl/examples/Makefile

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -78,6 +78,10 @@ docker-down-all: email-docker-down
7878
.PHONY: email-test
7979
email-test:
8080
@echo "Running email processing tests..."
81+
@if [ ! -d "email_processing/tests" ]; then \
82+
echo "Creating tests directory in email_processing..."; \
83+
mkdir -p email_processing/tests; \
84+
fi
8185
$(EXPORT_PYTHONPATH) && cd email_processing && $(PYTHON) -m pytest tests/
8286

8387
.PHONY: email-run
@@ -109,6 +113,10 @@ email-docker-down:
109113
.PHONY: data-test
110114
data-test:
111115
@echo "Running data processing tests..."
116+
@if [ ! -d "data_processing/tests" ]; then \
117+
echo "Creating tests directory in data_processing..."; \
118+
mkdir -p data_processing/tests; \
119+
fi
112120
$(EXPORT_PYTHONPATH) && cd data_processing && $(PYTHON) -m pytest tests/
113121

114122
.PHONY: data-run
@@ -120,6 +128,10 @@ data-run:
120128
.PHONY: api-test
121129
api-test:
122130
@echo "Running API integration tests..."
131+
@if [ ! -d "api_integration/tests" ]; then \
132+
echo "Creating tests directory in api_integration..."; \
133+
mkdir -p api_integration/tests; \
134+
fi
123135
$(EXPORT_PYTHONPATH) && cd api_integration && $(PYTHON) -m pytest tests/
124136

125137
.PHONY: api-run
@@ -131,6 +143,10 @@ api-run:
131143
.PHONY: parallel-test
132144
parallel-test:
133145
@echo "Running parallel tasks tests..."
146+
@if [ ! -d "parallel_tasks/tests" ]; then \
147+
echo "Creating tests directory in parallel_tasks..."; \
148+
mkdir -p parallel_tasks/tests; \
149+
fi
134150
$(EXPORT_PYTHONPATH) && cd parallel_tasks && $(PYTHON) -m pytest tests/
135151

136152
.PHONY: parallel-run
@@ -142,6 +158,10 @@ parallel-run:
142158
.PHONY: viz-test
143159
viz-test:
144160
@echo "Running visualization tests..."
161+
@if [ ! -d "visualization/tests" ]; then \
162+
echo "Creating tests directory in visualization..."; \
163+
mkdir -p visualization/tests; \
164+
fi
145165
$(EXPORT_PYTHONPATH) && cd visualization && $(PYTHON) -m pytest tests/
146166

147167
.PHONY: viz-run
@@ -153,6 +173,10 @@ viz-run:
153173
.PHONY: perf-test
154174
perf-test:
155175
@echo "Running performance benchmark tests..."
176+
@if [ ! -d "performance_benchmarks/tests" ]; then \
177+
echo "Creating tests directory in performance_benchmarks..."; \
178+
mkdir -p performance_benchmarks/tests; \
179+
fi
156180
$(EXPORT_PYTHONPATH) && cd performance_benchmarks && $(PYTHON) -m pytest tests/
157181

158182
.PHONY: perf-run

dsl/examples/email_processing/flow.py

Lines changed: 45 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,11 +5,54 @@
55
"""
66
import os
77
import json
8+
import sys
89
from dotenv import load_dotenv
910
from pathlib import Path
1011

11-
# Import Taskinity core functionality
12-
from taskinity.core.taskinity_core import run_flow_from_dsl, save_dsl, load_dsl
12+
# Check if running in mock mode from command line
13+
if '--mock' in sys.argv:
14+
# Create mock modules
15+
class MockTaskinityCore:
16+
@staticmethod
17+
def run_flow_from_dsl(flow_dsl, input_data):
18+
print(f"Running mock flow with {len(input_data)} input parameters")
19+
return {
20+
"status": "success",
21+
"processed_emails": 5,
22+
"send_responses": {
23+
"total_attempted": 5,
24+
"total_sent": 5,
25+
"sent_urgent": 1,
26+
"sent_attachments": 2,
27+
"sent_support": 0,
28+
"sent_orders": 0,
29+
"sent_regular": 2
30+
}
31+
}
32+
33+
@staticmethod
34+
def save_dsl(dsl_text, filename):
35+
dsl_dir = Path("dsl_definitions")
36+
dsl_dir.mkdir(exist_ok=True)
37+
with open(dsl_dir / filename, 'w') as f:
38+
f.write(dsl_text)
39+
return True
40+
41+
@staticmethod
42+
def load_dsl(filename):
43+
return "flow EmailProcessing: mock flow"
44+
45+
# Use mock modules
46+
run_flow_from_dsl = MockTaskinityCore.run_flow_from_dsl
47+
save_dsl = MockTaskinityCore.save_dsl
48+
load_dsl = MockTaskinityCore.load_dsl
49+
else:
50+
# Import real Taskinity core functionality
51+
try:
52+
from taskinity.core.taskinity_core import run_flow_from_dsl, save_dsl, load_dsl
53+
except ImportError:
54+
print("Error: Could not import Taskinity modules. Run with --mock flag for testing.")
55+
sys.exit(1)
1356

1457
# Import tasks
1558
from tasks import (

0 commit comments

Comments (0)