Skip to content

Commit f5641f7

Browse files
committed
- Added demo adder multi step orchestration config files
- Added time series data files - Added postman collection for demo adder orchestration - Minor changes in miner's rule documentation for clarity
1 parent 0d8fc67 commit f5641f7

10 files changed

Lines changed: 1290 additions & 4 deletions

orchestrations/demoAdderMultiStepOrchestration/Demo_adder_Multistep_Orchestration.postman_collection

Lines changed: 948 additions & 0 deletions
Large diffs are not rendered by default.
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
{
2+
"messageId": "SampleData1",
3+
"body": [
4+
{
5+
"name": "KW",
6+
"datapoints": [
7+
[
8+
"1455733669601",
9+
1.0
10+
],
11+
[
12+
"1455733669602",
13+
2.0
14+
],
15+
[
16+
"1455733669603",
17+
3.0
18+
],
19+
[
20+
"1455733669604",
21+
4.0
22+
],
23+
[
24+
"1455733669605",
25+
5.0
26+
],
27+
[
28+
"1455733669606",
29+
6.0
30+
],
31+
[
32+
"1455733669607",
33+
7.0
34+
],
35+
[
36+
"1455733669608",
37+
8.0
38+
],
39+
[
40+
"1455733669609",
41+
9.0
42+
],
43+
[
44+
"1455733669610",
45+
10.0
46+
]
47+
],
48+
"attributes": {
49+
"source1": "analytics",
50+
"source2": "test"
51+
}
52+
}
53+
]
54+
}
Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
{
2+
"messageId": "SampleData2",
3+
"body": [
4+
{
5+
"name": "vibration",
6+
"datapoints": [
7+
[
8+
"1455733669601",
9+
100.0
10+
],
11+
[
12+
"1455733669602",
13+
200.0
14+
],
15+
[
16+
"1455733669603",
17+
300.0
18+
],
19+
[
20+
"1455733669604",
21+
400.0
22+
],
23+
[
24+
"1455733669605",
25+
500.0
26+
],
27+
[
28+
"1455733669606",
29+
600.0
30+
],
31+
[
32+
"1455733669607",
33+
700.0
34+
],
35+
[
36+
"1455733669608",
37+
800.0
38+
],
39+
[
40+
"1455733669609",
41+
900.0
42+
],
43+
[
44+
"1455733669610",
45+
1000.0
46+
]
47+
],
48+
"attributes": {
49+
"source1": "analytics",
50+
"source2": "test"
51+
}
52+
}
53+
]
54+
}
55+
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
<?xml version="1.0" encoding="UTF-8"?>
2+
<definitions xmlns="http://www.omg.org/spec/BPMN/20100524/MODEL"
3+
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4+
expressionLanguage="http://www.w3.org/1999/XPath" id="sid-81430087-7a44-4be3-8517-914faf923256"
5+
targetNamespace="DSP-PM" typeLanguage="http://www.w3.org/2001/XMLSchema"
6+
xsi:schemaLocation="http://www.omg.org/spec/BPMN/20100524/MODEL http://www.omg.org/spec/BPMN/2.0/20100501/BPMN20.xsd"
7+
xmlns:activiti="http://activiti.org/bpmn">
8+
9+
<process id="DemoAdderWorkflow" isExecutable="true">
10+
11+
<startEvent id="sid-start"
12+
name="">
13+
<outgoing>sid-flow1</outgoing>
14+
</startEvent>
15+
16+
<serviceTask completionQuantity="1" id="sid-10001"
17+
isForCompensation="false" name="&amp;lt;ANALYTIC_ID&amp;gt;::&amp;lt;ANALYTIC_NAME&amp;gt;::&amp;lt;ANALYTIC_VERSION&amp;gt;" startQuantity="1"
18+
activiti:delegateExpression="${javaDelegate}"
19+
xmlns:activiti="http://activiti.org/bpmn">
20+
<incoming>sid-flow1</incoming>
21+
<outgoing>sid-flow2</outgoing>
22+
</serviceTask>
23+
24+
<serviceTask completionQuantity="1" id="sid-10002"
25+
isForCompensation="false" name="&amp;lt;ANALYTIC_ID&amp;gt;::&amp;lt;ANALYTIC_NAME&amp;gt;::&amp;lt;ANALYTIC_VERSION&amp;gt;" startQuantity="1"
26+
activiti:delegateExpression="${javaDelegate}"
27+
xmlns:activiti="http://activiti.org/bpmn">
28+
<incoming>sid-flow2</incoming>
29+
<outgoing>sid-flow3</outgoing>
30+
</serviceTask>
31+
32+
33+
<endEvent id="sid-end" name="">
34+
<incoming>sid-flow3</incoming>
35+
</endEvent>
36+
37+
38+
<sequenceFlow id="sid-flow1"
39+
name="" sourceRef="sid-start"
40+
targetRef="sid-10001"/>
41+
42+
<sequenceFlow id="sid-flow2"
43+
name="" sourceRef="sid-10001" targetRef="sid-10002"/>
44+
45+
<sequenceFlow id="sid-flow3"
46+
name="" sourceRef="sid-10002" targetRef="sid-end"/>
47+
48+
</process>
49+
50+
</definitions>
Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
{
2+
"analyticName": "java-timeseries-demo-adder",
3+
"analyticVersion": "1.0",
4+
"orchestrationStepId": "sid-10001",
5+
"iterations": [
6+
{
7+
"inputMaps": [
8+
{
9+
"valueSourceType": "DATA_CONNECTOR",
10+
"fullyQualifiedPortName": "data.time_series.numberArray1",
11+
"fieldId": "KW",
12+
"queryCriteria": {
13+
"start": 1455733669601,
14+
"end": 1455733669610
15+
},
16+
"engUnit": "kw",
17+
"required": true,
18+
"dataSourceId": "Predix Time Series"
19+
},
20+
{
21+
"valueSourceType": "DATA_CONNECTOR",
22+
"fullyQualifiedPortName": "data.time_series.numberArray2",
23+
"fieldId": "vibration",
24+
"queryCriteria": {
25+
"start": 1455733669601,
26+
"end": 1455733669610
27+
},
28+
"engUnit": "hertz",
29+
"required": true,
30+
"dataSourceId": "Predix Time Series"
31+
}
32+
],
33+
"outputMaps": [
34+
{
35+
"fullyQualifiedPortName": "data.time_series.sum",
36+
"fieldId": "bearing_temperature",
37+
"engUnit": "Celsius",
38+
"dataSourceId": "Temporary, Predix Time Series"
39+
}
40+
]
41+
}
42+
]
43+
}
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
{
2+
"analyticName": "java-timeseries-demo-adder",
3+
"analyticVersion": "1.0",
4+
"orchestrationStepId": "sid-10002",
5+
"iterations": [
6+
{
7+
"inputMaps": [
8+
{
9+
"valueSourceType": "DATA_CONNECTOR",
10+
"fullyQualifiedPortName": "data.time_series.numberArray1",
11+
"fieldId": "bearing_temperature",
12+
"queryCriteria": {
13+
"start": 1455733669601,
14+
"end": 1455733669610
15+
},
16+
"engUnit": "Celsius",
17+
"required": true,
18+
"dataSourceId": "Predix Time Series"
19+
},
20+
{
21+
"valueSourceType": "DATA_CONNECTOR",
22+
"fullyQualifiedPortName": "data.time_series.numberArray2",
23+
"fieldId": "vibration",
24+
"queryCriteria": {
25+
"start": 1455733669601,
26+
"end": 1455733669610
27+
},
28+
"engUnit": "hertz",
29+
"required": true,
30+
"dataSourceId": "Predix Time Series"
31+
}
32+
],
33+
"inputModelMaps": [
34+
{
35+
"modelPortName": "threshold",
36+
"modelName": "model-name",
37+
"modelVersion": "v1"
38+
}
39+
],
40+
"outputMaps": [
41+
{
42+
"fullyQualifiedPortName": "data.time_series.sum",
43+
"fieldId": "bearing_temperature_final",
44+
"engUnit": "Celsius",
45+
"dataSourceId": "Predix Time Series"
46+
}
47+
]
48+
}
49+
]
50+
}
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
500
Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
# Demo Adder Multi-step Orchestration
2+
- The demo adder analytic has been modified to take as input a trained model. This model is in the form of a threshold value. The addition is performed and if the result is greater than the threshold value,
3+
a -1 value will be written to the output.
4+
- The first step adds KW and vibration tag values and writes output to bearing temperature tag in timeseries.
5+
- The second step adds bearing temperature from first step output with vibration tag value and writes output to bearing temperature final tag in timeseries.
6+
7+
# File list
8+
| File/Directory | Description |
9+
| -------------- | ----------- |
10+
| InitialData | The data that needs to be loaded into Timeseries to support the orchestration and the asset group and tag map queries that will be used by the runtime.|
11+
| Orchestration | The orchestration configuration files. |
12+
13+
# Steps to load and run the orchestration
14+
15+
1. Load the data into time series
16+
- Use the websocket connection in the Predix Tool Kit (basic: https://predix-starter.run.aws-usw02-pr.ice.predix.io/#!/wsClient) to load the sample data into your Predix Timeseries instance.
17+
1. log in as a user from your UAA
18+
2. use Time series ingest to load the data from the following files in the InitialData folder
19+
- rawTimeSeriesData_KW.json
20+
- rawTimeSeriesData_vibration.json
21+
- predix-zone-id is your Time series guid (zone id/instance id)
22+
1. open the socket
23+
2. use the contents of rawTimeSeriesData_KW.json as the request body and send the message
24+
3. repeat for the rest of the timeseries data files listed above
25+
4. close the socket
26+
3. use get timeseries tag values request from validateDataLoaded folder in the postman collection to verify that timeseries data has been loaded.
27+
28+
2. Load the Timeseries Demo Adder with model Analytic to your Analytics Catalog
29+
1. get the timeseries-adder-java code from https://github.com/PredixDev/predix-analytics-sample/tree/master/analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-java
30+
2. build it (mvn clean install)
31+
3. use the requests from addAnalytics folder in the postman collection or the UI to create the catalog entry and upload the jar and template file (see instructions for adding an analytic to the catalog)
32+
4. update Orchestration/TwoStepOrchestration.bpmn20.xml to reference this analytic
33+
* update analyticInstanceId, analyticName, and analyticVersion in the demoAdder serviceTask with the catalog entry instance id, entry name and version
34+
5. test the analytic (this will deploy it) (see Predix IO Analytics Catalog docs for instructions on deploying an analytic) (sample data for testing the analytic is included in the analytic's github repository)
35+
36+
3. Load the orchestration configuration
37+
1. create the orchestration entry using the Create Orchestration Configuration Entry request in addOrchestrationConfiguration folder in the postman collection.
38+
2. validate the bpmn using the Validate Orchestration request in addOrchestrationConfiguration folder in the postman collection.
39+
* the bpmn file should have been updated with the catalog entry id for the analytic (Both steps use same catalog entry id)
40+
* the expected response should appear as shown below (the analyticIds will be the catalog entry ids from the bpmn)
41+
* if the response does not contain status "200" for each analytic, the analytic is not running or the runtime is not set up properly. Redeploy the analytic and validate the runtime setup.
42+
<pre>
43+
{
44+
"analyticValidationStatusMap": {
45+
"http://analyticId.grc-apps.svc.ice.ge.com/api/v1/analytic/execution": "200",
46+
"http://analyticId.grc-apps.svc.ice.ge.com/api/v1/analytic/execution": "200"
47+
},
48+
"id": "<responseId>"
49+
}
50+
</pre>
51+
3. upload the bpmn using Create Orchestration BPMN Artifact request in the addOrchestrationConfiguration postman folder
52+
4. upload the first step's port to field map using Create Orchestration Port to Field Map Artifact request in the addOrchestrationConfiguration postman folder after updating the request as follows:
53+
* fill in the orchestrationEntryId
54+
* set the name to the step name from the bpmn (sid-10001)
55+
* select the Orchestration/step1-portToFieldMap.json
56+
5. upload the second step's port to field map using Create Orchestration Port to Field Map Artifact request in the addOrchestrationConfiguration postman folder after updating the request as follows:
57+
* fill in the orchestrationEntryId
58+
* set the name to the step name from the bpmn (sid-10002)
59+
* select the Orchestration/step2-portToFieldMap.json
60+
6. verify that the configuration has been loaded using the Get Artifact Metadata for an Orchestration Configuration request in the addOrchestrationConfiguration postman folder
61+
* update the <orchestration entry id> in the url with the orchestration entry id from step 3.1 above
62+
* the response should contain entries for the bpmn and 2 port to field maps
63+
7. upload the threshold model using the postman Upload Model request in the addOrchestrationConfiguration postman folder after updating the request as follows:
64+
* set modelKey to 'model-key'
65+
* select the thresholdModel.json file in the Orchestration directory
66+
8. validate that the models have been uploaded with the Get Orchestrations Models in the addOrchestrationConfiguration postman folder
67+
68+
4. Run the orchestration
69+
* use the postman Run Orchestration request in the postman folder after updating the request as follows:
70+
* put the following json in the request body, updating the orchestrationEntryId with the orchestration entry id from step 3.1 above.
71+
<pre>
72+
{
73+
"orchestrationConfigurationId": "<orchestrationEntryId>",
74+
"assetId": null,
75+
"assetGroup": null,
76+
"assetDataFieldsMap": {"KW": "KW", "vibration": "vibration", "bearing_temperature" : "bearing_temperature", "bearing_temperature_final" : "bearing_temperature_final"},
77+
"modelGroupKey":"model-key"
78+
}
79+
</pre>
80+
* note down the orchestration request id in the response
81+
82+
5. View the orchestration status using the Orchestration Status request in the postman folder.
83+
* replace orchestrationRequestId in the uri with the orchestration request id from the response in step 4.
84+
85+
# Congratulations! You have a 2 step orchestration running!

orchestrations/multiStepOrchestration/README.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@
9494
* update analyticInstanceId, analyticName, and analyticVersion in the demoAdder serviceTask with the catalog entry instance id, entry name and version
9595
5. test the analytic (this will deploy it) (see Predix IO Analytics Catalog docs for instructions on deploying an analytic) (sample data for testing the analytic is included in the analytic's github repository)
9696
7. load the Miners Rule v2 analytic to the catalog
97-
1. get Miners Rule v2 from ???github link???
97+
1. get Miners Rule v2 from https://github.com/PredixDev/predix-analytics-sample/tree/master/analytics/miners-rule
9898
2. build it (mvn clean install)
9999
3. use requests from addAnalytics folder in the postman collection or the UI to create the catalog entry and upload the jar and template file (see instructions for adding an analytic to the catalog)
100100
4. update Orchestration/MinersRuleOrchestration.bpmn20.xml to reference this analytic
@@ -112,7 +112,7 @@
112112
"http://anlayticId.grc-apps.svc.ice.ge.com/api/v1/analytic/execution": "200",
113113
"http://analyticId.grc-apps.svc.ice.ge.com/api/v1/analytic/execution": "200"
114114
},
115-
"id": "8cede08a-6480-11e6-92be-fac19b3e939e"
115+
"id": "<responseId>"
116116
}
117117
</pre>
118118
2. upload the bpmn using Create Orchestration BPMN Artifact request in the addOrchestrationConfiguration postman folder
@@ -140,7 +140,7 @@
140140
* put the following json in the request body. Updating the orchestrationEntryId with the orchestration entry id from step 8.i above.
141141
<pre>
142142
{
143-
"orchestrationConfigurationId": "orchestrationEntryId",
143+
"orchestrationConfigurationId": "<orchestrationEntryId>",
144144
"assetGroup": {
145145
"dataSourceId": "PredixAsset",
146146
"assetSelectionFilter": "/assets?filter=classification=/classifications/turbine:uri=/assets/minersRuleOrch*&fields=uri"

0 commit comments

Comments
 (0)