// detailed_cycle_time.go

package data

import (
	"context"
	"database/sql"
	"fmt"
	"sort"
)

// CycleTimeStatistics holds the per-stage cycle time breakdown for merged
// merge requests: coding, pickup (delay before review starts), review, and
// deploy.
type CycleTimeStatistics struct {
	CodingTime Statistics `json:"coding_time"`
	PickupTime Statistics `json:"pickup_time"`
	ReviewTime Statistics `json:"review_time"`
	DeployTime Statistics `json:"deploy_time"`
}

// AggregatedCycleTimeStatistics combines the overall statistics with the
// per-week breakdown.
type AggregatedCycleTimeStatistics = OverallWeeklyData[CycleTimeStatistics]

// WeeklyCycleTimeStatistics holds the cycle time breakdown for a single week.
type WeeklyCycleTimeStatistics = WeeklyData[CycleTimeStatistics]

// CycleTimeStatisticsKey identifies the detailed cycle time query.
type CycleTimeStatisticsKey struct{}

// CycleTimeStatisticsQuery is the SQL query keyed by CycleTimeStatisticsKey.
type CycleTimeStatisticsQuery = Query[CycleTimeStatisticsKey]

// Execute runs the detailed cycle time query against db by delegating to
// GetDetailedCycleTime.
func (CycleTimeStatisticsKey) Execute(
	ctx context.Context,
	db *DB,
	q CycleTimeStatisticsQuery,
	org, repo string,
	weeks []string,
	team *int64,
) (any, error) {
	return db.GetDetailedCycleTime(ctx, q, org, repo, weeks, team)
}

// BuildDetailedCycleTimeQuery builds the SQL statement that computes the
// average, median, 75th and 95th percentile of coding, pickup, review and
// deploy time, both per week and over the whole requested period. Only the
// week placeholder list and the optional team subquery are interpolated into
// the statement text; the caller-supplied values are bound later as query
// parameters.
func BuildDetailedCycleTimeQuery(weeks []string, team *int64) CycleTimeStatisticsQuery {
	teamQuery := ""
	if team != nil {
		teamQuery = getTeamSubquery()
	}
	weeksPlaceholder := getWeeksPlaceholder(len(weeks))
	return CycleTimeStatisticsQuery{value: fmt.Sprintf(`
WITH has_deployment AS (
SELECT DISTINCT repository_external_id, forge_type
FROM tenant_deployment_environments
UNION
SELECT DISTINCT repository_external_id, forge_type
FROM tenant_cicd_deploy_workflows
),
dataset AS (
SELECT
mergedAt.week AS week,
metrics.coding_duration AS coding_time,
metrics.review_start_delay AS pickup_time,
metrics.review_duration AS review_time,
CASE
WHEN has_deployment.repository_external_id IS NULL THEN NULL
WHEN metrics.deploy_duration = 0 THEN
(unixepoch(date('now')) - unixepoch(
CONCAT(dates.year, '-', LPAD(dates.month, 2, '0'), '-', LPAD(dates.day, 2, '0'))
)) * 1000
ELSE metrics.deploy_duration
END AS deploy_time
FROM transform_merge_request_metrics AS metrics
JOIN transform_repositories AS repo
ON repo.id = metrics.repository
LEFT JOIN has_deployment
ON has_deployment.repository_external_id = repo.external_id AND has_deployment.forge_type = repo.forge_type - 1
JOIN transform_merge_request_fact_dates_junk AS dj
ON metrics.dates_junk = dj.id
JOIN transform_dates AS mergedAt
ON dj.merged_at = mergedAt.id
JOIN transform_dates AS dates
ON dj.merged_at = dates.id -- Join the dates table to get the actual day, month, and year
JOIN transform_merge_request_fact_users_junk AS uj
ON metrics.users_junk = uj.id
JOIN transform_forge_users AS author
ON uj.author = author.id
JOIN transform_merge_requests AS mrs
ON metrics.merge_request = mrs.id
JOIN transform_branches AS branch
ON mrs.target_branch = branch.id
WHERE mergedAt.week IN (%s)
AND repo.namespace_name = ?
AND repo.name = ?
AND branch.id = repo.default_branch
%s
AND author.bot = 0
),
data_by_week AS (
SELECT
week AS week,
AVG(coding_time) AS avg_coding_time,
AVG(pickup_time) AS avg_pickup_time,
AVG(review_time) AS avg_review_time,
AVG(deploy_time) AS avg_deploy_time,
MEDIAN(coding_time) AS p50_coding_time,
MEDIAN(pickup_time) AS p50_pickup_time,
MEDIAN(review_time) AS p50_review_time,
MEDIAN(deploy_time) AS p50_deploy_time,
PERCENTILE_75(coding_time) AS p75_coding_time,
PERCENTILE_75(pickup_time) AS p75_pickup_time,
PERCENTILE_75(review_time) AS p75_review_time,
PERCENTILE_75(deploy_time) AS p75_deploy_time,
PERCENTILE_95(coding_time) AS p95_coding_time,
PERCENTILE_95(pickup_time) AS p95_pickup_time,
PERCENTILE_95(review_time) AS p95_review_time,
PERCENTILE_95(deploy_time) AS p95_deploy_time
FROM dataset
GROUP BY week
),
data_total AS (
SELECT
AVG(coding_time) AS avg_coding_time,
AVG(pickup_time) AS avg_pickup_time,
AVG(review_time) AS avg_review_time,
AVG(deploy_time) AS avg_deploy_time,
MEDIAN(coding_time) AS p50_coding_time,
MEDIAN(pickup_time) AS p50_pickup_time,
MEDIAN(review_time) AS p50_review_time,
MEDIAN(deploy_time) AS p50_deploy_time,
PERCENTILE_75(coding_time) AS p75_coding_time,
PERCENTILE_75(pickup_time) AS p75_pickup_time,
PERCENTILE_75(review_time) AS p75_review_time,
PERCENTILE_75(deploy_time) AS p75_deploy_time,
PERCENTILE_95(coding_time) AS p95_coding_time,
PERCENTILE_95(pickup_time) AS p95_pickup_time,
PERCENTILE_95(review_time) AS p95_review_time,
PERCENTILE_95(deploy_time) AS p95_deploy_time
FROM dataset
)
SELECT
NULL AS week,
avg_coding_time,
avg_pickup_time,
avg_review_time,
avg_deploy_time,
p50_coding_time,
p50_pickup_time,
p50_review_time,
p50_deploy_time,
p75_coding_time,
p75_pickup_time,
p75_review_time,
p75_deploy_time,
p95_coding_time,
p95_pickup_time,
p95_review_time,
p95_deploy_time
FROM data_total
UNION ALL
SELECT
week,
avg_coding_time,
avg_pickup_time,
avg_review_time,
avg_deploy_time,
p50_coding_time,
p50_pickup_time,
p50_review_time,
p50_deploy_time,
p75_coding_time,
p75_pickup_time,
p75_review_time,
p75_deploy_time,
p95_coding_time,
p95_pickup_time,
p95_review_time,
p95_deploy_time
FROM data_by_week;
`,
weeksPlaceholder,
teamQuery,
)}
}

// GetDetailedCycleTime executes the detailed cycle time query and returns the
// overall statistics together with one entry per requested week. Weeks with
// no merged merge requests are filled with zero-valued statistics so the
// result always covers every requested week.
func (d DB) GetDetailedCycleTime(
	ctx context.Context,
	query CycleTimeStatisticsQuery,
	namespace string,
	repository string,
	weeks []string,
	team *int64,
) (*AggregatedCycleTimeStatistics, error) {
	// Bind parameters in the order the placeholders appear in the SQL: the
	// weeks for the IN clause, then namespace and repository, and finally the
	// optional team filter.
	queryParams := make([]interface{}, len(weeks))
	for i, v := range weeks {
		queryParams[i] = v
	}
	queryParams = append(queryParams, namespace, repository)
	if team != nil {
		queryParams = append(queryParams, team)
	}
rows, err := d.db.QueryContext(ctx, query.Get(), queryParams...)
if err != nil {
return nil, err
}
defer rows.Close()
	// The query returns one row per week that has data, plus a single row
	// with a NULL week that carries the totals for the whole period.
	wcts := make([]WeeklyCycleTimeStatistics, 0)
	acts := &AggregatedCycleTimeStatistics{}
	weekSet := make(map[string]bool)
for rows.Next() {
var week sql.NullString
var codingTime, pickupTime, reviewTime, deployTime Statistics
if err := rows.Scan(
&week,
&codingTime.Average,
&pickupTime.Average,
&reviewTime.Average,
&deployTime.Average,
&codingTime.Median,
&pickupTime.Median,
&reviewTime.Median,
&deployTime.Median,
&codingTime.Percentile75,
&pickupTime.Percentile75,
&reviewTime.Percentile75,
&deployTime.Percentile75,
&codingTime.Percentile95,
&pickupTime.Percentile95,
&reviewTime.Percentile95,
&deployTime.Percentile95,
); err != nil {
return nil, err
}
if week.Valid {
weekSet[week.String] = true
wcts = append(wcts, WeeklyCycleTimeStatistics{
Week: week.String,
Data: CycleTimeStatistics{
CodingTime: codingTime,
PickupTime: pickupTime,
ReviewTime: reviewTime,
DeployTime: deployTime,
},
})
		} else {
			// The NULL-week row holds the totals for the whole period.
acts.Overall.CodingTime = codingTime
acts.Overall.PickupTime = pickupTime
acts.Overall.ReviewTime = reviewTime
acts.Overall.DeployTime = deployTime
}
}
if err := rows.Err(); err != nil {
return nil, err
}
	// Backfill zero-valued entries for any requested week the query did not
	// return, so the weekly series always covers every requested week.
	for _, inputWeek := range weeks {
		if !weekSet[inputWeek] {
wcts = append(wcts, WeeklyCycleTimeStatistics{
Week: inputWeek,
Data: CycleTimeStatistics{
CodingTime: Statistics{},
PickupTime: Statistics{},
ReviewTime: Statistics{},
DeployTime: Statistics{},
},
})
}
}
	// Sort the weekly entries by week label so the series is returned in a
	// stable order.
	sort.Slice(wcts, func(i, j int) bool {
		return wcts[i].Week < wcts[j].Week
	})
acts.Weekly = wcts
return acts, nil
}
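
// Usage sketch (illustrative only, not part of the original file): how the
// two exported entry points above are meant to fit together. The week labels,
// the "acme"/"widgets" namespace and repository names, and the surrounding
// handler code are assumptions made for illustration; only
// BuildDetailedCycleTimeQuery and GetDetailedCycleTime come from this file.
//
//	weeks := []string{"2024-20", "2024-21", "2024-22"}
//	q := BuildDetailedCycleTimeQuery(weeks, nil) // nil team means no team filter
//	stats, err := db.GetDetailedCycleTime(ctx, q, "acme", "widgets", weeks, nil)
//	if err != nil {
//		return err
//	}
//	// stats.Overall holds the totals for the whole period; stats.Weekly has
//	// one entry per requested week, zero-valued where no data was found.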