Skip to content

Commit 98b25e3

Browse files
authored
Merge pull request patr1ck-m#27 from patr1ck-m/feature/examples
Add examples for CPU usage and exponential distribution anomaly detection
2 parents f5901f7 + 9bc3d9b commit 98b25e3

5 files changed

Lines changed: 125 additions & 39 deletions

File tree

src/examples.effekt

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
module src/examples
2+
3+
import src/lib/cpu_utilization_input
4+
import src/lib/anomaly_detection
5+
import src/lib/anomaly_logger
6+
import src/lib/stream_input
7+
import src/lib/event_logger
8+
import src/lib/aggregation
9+
import src/lib/event
10+
import io/error
11+
import io/time
12+
13+
/// Example pipeline: samples CPU usage as a pull stream, aggregates it with a
/// windowed mean, flags values outside [0.0, 0.25] as anomalies, and logs
/// raw events, aggregated events, and anomalies to CSV files (anomalies also
/// to the console).
/// NOTE(review): the `with` clauses stack handlers, so their order determines
/// which stage wraps which — confirm intended ordering against the lib docs.
def cpuUsageAnomalyDetectionExample(): Unit = {
14+
// Abort the whole example if any IO operation fails (e.g. CSV file not writable).
with on[IOError].panic()
15+
with boundary()
16+
// Report detected anomalies on the console ...
with anomaly_logger::logToConsole();
17+
// ... and persist them to a CSV file.
with anomaly_logger::logToFileCsvRethrow("cpu_anomalies.csv");
18+
// Flag aggregated values outside the inclusive range [0.0, 0.25].
with minMaxAnomalyDetector(0.0, 0.25);
19+
with event_logger::logToFileCsvRethrow("cpu_usage_aggregated.csv"); // Just for convenience, information is already in cpu_anomalies.csv
20+
21+
// Pull CPU usage samples; 100 presumably is a sampling interval in ms — TODO confirm.
with cpuUsagePullStream(100);
22+
// Log every raw (pre-aggregation) sample to CSV.
with event_logger::logToFileCsvPull("cpu_usage_raw.csv");
23+
// Throttle the pull stream: delay 100 ms per read.
with pullStreamDelayer(100);
24+
// Drive the pipeline: mean over a sliding window of 10 events.
aggregateMeanWindow(10);
25+
}
26+
27+
/// Example using z-score-based anomaly detection on an exponentially
/// distributed synthetic data stream.
28+
def exponentialDistributionWithZScoreAnomalyDetectionExample(): Unit = {
29+
// Abort the whole example if any IO operation fails.
with on[IOError].panic()
30+
with boundary()
31+
// Print detected anomalies to the console.
with anomaly_logger::logAnomaliesToConsole();
32+
// Flag events whose z-score exceeds the 0.90 threshold — presumably a
// quantile/confidence parameter rather than a raw z value; TODO confirm.
with zScoreAnomalyDetector(0.90);
33+
34+
// Synthetic pull stream of Exp(1.0)-distributed values.
with stream_input::exponentialDistributedPull(1.0);
35+
// Delay 50 ms per read, and stop after 100 events.
with pullStreamDelayer(50);
36+
with pullStreamLimiter(100);
37+
// Drive the pipeline with a running-mean aggregation.
aggregateMean();
38+
}

src/lib/anomaly_logger.effekt

Lines changed: 13 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -258,17 +258,9 @@ namespace examples {
258258
Event(6, -1.0),
259259
Event(7, 3.0)
260260
]
261-
try {
262-
minMaxAnomalyDetector(0.0, 5.0)
263-
} with read[Event] { () =>
264-
if (lst.isEmpty()) {
265-
println("No more values to read, stopping...")
266-
resume { do stop() }
267-
} else {
268-
val first = lst.head()
269-
lst = lst.deleteAt(0)
270-
println("Providing value: " ++ first.value.show())
271-
resume { unbox first }
261+
minMaxAnomalyDetector(0.0, 5.0) {
262+
lst.foreach { e =>
263+
do emit(e)
272264
}
273265
}
274266
}
@@ -285,17 +277,9 @@ namespace examples {
285277
Event(6, -1.0),
286278
Event(7, 3.0)
287279
]
288-
try {
289-
zScoreAnomalyDetector(1.0)
290-
} with read[Event] { () =>
291-
if (lst.isEmpty()) {
292-
println("No more values to read, stopping...")
293-
resume { do stop() }
294-
} else {
295-
val first = lst.head()
296-
lst = lst.deleteAt(0)
297-
println("Providing value: " ++ first.value.show())
298-
resume { unbox first }
280+
zScoreAnomalyDetector(1.0) {
281+
lst.foreach { e =>
282+
do emit(e)
299283
}
300284
}
301285
}
@@ -315,25 +299,17 @@ namespace examples {
315299
Event(6, -1.0),
316300
Event(7, 3.0)
317301
]
318-
try {
319-
zScoreAnomalyDetector(1.0)
320-
} with read[Event] { () =>
321-
if (lst.isEmpty()) {
322-
println("No more values to read, stopping...")
323-
resume { do stop() }
324-
} else {
325-
val first = lst.head()
326-
lst = lst.deleteAt(0)
327-
println("Providing value: " ++ first.value.show())
328-
resume { unbox first }
302+
zScoreAnomalyDetector(1.0) {
303+
lst.foreach { e =>
304+
do emit(e)
329305
}
330306
}
331307
}
332-
}
333308

334-
def main(): Unit = {
335-
examples::example3()
336-
()
309+
def main(): Unit = {
310+
examples::example3()
311+
()
312+
}
337313
}
338314

339315
namespace internal {

src/lib/event_logger.effekt

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,20 @@ def logToConsoleRethrow() {body: () => Unit / emit[Event]}: Unit / emit[Event] =
2727
}
2828
}
2929

30+
/// An event-logging handler that prints every event read from a pull-based
/// stream to the console, then passes the event through unchanged.
31+
def logToConsolePull() {body: () => Unit / read[Event]}: Unit / read[Event] = {
32+
with boundary
33+
try {
34+
body()
35+
} with read[Event] {
36+
// Forward the read to the outer stream ...
val e = do read[Event]()
37+
resume {
38+
// ... log it, and hand the unmodified event back to the consumer.
println("Event: " ++ e.value.show() ++ " at t=" ++ e.timestamp.show())
39+
e
40+
}
41+
}
42+
}
43+
3044
/// An AnomalyDetection handler that logs all events to a CSV file
3145
def logToFileCsv(filePath: String) {body: () => Unit / emit[Event]}: Unit / Exception[IOError] = {
3246
var buffer: List[CsvRow] = [["timestamp", "value"]]
@@ -82,6 +96,39 @@ def logToFileCsvRethrow(filePath: String) {body: () => Unit / emit[Event]}: Unit
8296
}
8397
}
8498

99+
/// Logs every event read from a pull-based stream to a CSV file at `filePath`,
/// buffering up to `maxBufferSize` rows between writes, then passes each event
/// through unchanged. The first flush writes the header row; subsequent
/// flushes append (tracked via `first`).
def logToFileCsvPull(filePath: String) {body: () => Unit / read[Event]}: Unit / { read[Event], Exception[IOError] } = {
100+
// Rows are prepended (Cons) for O(1) insert and reversed before writing.
var buffer: List[CsvRow] = [["timestamp", "value"]]
101+
// True until the first flush — controls header/overwrite vs. append behavior
// in saveCsvRows (presumably; TODO confirm against internal::saveCsvRows).
var first = true
102+
val maxBufferSize = 1000
103+
try { // Double try is required because the read in the with block throws stop when the stream ends
104+
try {
105+
body()
106+
} with read[Event] {
107+
// Forward the read to the outer stream, buffer a CSV row, pass event on.
val e = do read[Event]()
108+
resume {
109+
val row: CsvRow = [
110+
e.timestamp.show(),
111+
e.value.show()
112+
]
113+
buffer = Cons(row, buffer)
114+
if (buffer.size >= maxBufferSize) {
115+
// Flush buffer to file
116+
internal::saveCsvRows(buffer.reverse, filePath, first)
117+
buffer = []
118+
first = false
119+
}
120+
e
121+
}
122+
}
123+
// End-of-stream signal from the outer read — treated as normal termination.
} with stop {
124+
()
125+
}
126+
// Flush remaining buffer to file
127+
if (not(buffer.isEmpty())) {
128+
internal::saveCsvRows(buffer.reverse, filePath, first)
129+
}
130+
}
131+
85132

86133
namespace examples {
87134
def example1(): Unit = {

src/lib/stream_input.effekt

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,31 @@ def pushToPullStream[T]() {pushStr: () => Unit / { emit[T] } } {pullStr: () => U
1616
stream::source[T] { pushStr() } { pullStr() }
1717
}
1818

19+
/// Throttles a pull-based event stream: sleeps `delayMs` milliseconds after
/// each successful read before handing the event to the consumer.
def pullStreamDelayer(delayMs: Int) {body: () => Unit / { read[Event] } }: Unit / { read[Event] } = {
20+
try body() with read[Event] {
21+
resume {
22+
// Read from the outer stream, delay, then return the event unchanged.
val e = do read[Event]()
23+
time::sleep(delayMs)
24+
e
25+
}
26+
}
27+
}
28+
29+
/// Limits a pull-based event stream to at most `maxEvents` reads; once the
/// limit is reached, further reads signal `stop` (end of stream).
def pullStreamLimiter(maxEvents: Int) {body: () => Unit / { read[Event] } }: Unit / { read[Event] } = {
30+
// Number of events handed out so far.
var count = 0
31+
try body() with read[Event] {
32+
if (count >= maxEvents) {
33+
// Limit reached: signal end-of-stream instead of reading further.
resume { do stop() }
34+
} else {
35+
resume {
36+
// Count before reading, so a read that itself stops still consumed a slot.
count = count + 1
37+
val e = do read[Event]()
38+
e
39+
}
40+
}
41+
}
42+
}
43+
1944
def csvFeed(path: String, columnNameValue: String, delayMs: Int): Unit / { emit[Event], Exception[IOError], Exception[WrongFormat] } = {
2045
readFileUTF8(path) {
2146
returning::scanner[Char, Unit] {

src/main.effekt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
module main // must be named same as the file!
22

3-
import src/lib/aggregation
3+
import src/examples
44

55

66
def main(): Unit = {
7-
examples::example2()
7+
examples::exponentialDistributionWithZScoreAnomalyDetectionExample()
88
}

0 commit comments

Comments
 (0)