Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit 01fad98

Browse files
committed
Factor code to create aggregate into a separate function.
1 parent e725993 commit 01fad98

File tree

1 file changed

+71
-68
lines changed

1 file changed

+71
-68
lines changed

.ci/metrics/metrics.py

Lines changed: 71 additions & 68 deletions
Original file line numberDiff line numberDiff line change
@@ -100,29 +100,72 @@ class AggregateMetric:
100100
workflow_id: int
101101

102102

103-
def create_and_append_libcxx_aggregates(
104-
workflow_metrics: list[JobMetrics]) -> list[JobMetrics,AggregateMetric]:
105-
"""Find libc++ JobMetric entries and create aggregate metrics for them.
103+
def _construct_aggregate(ag_name: str, job_list: list[JobMetrics]) -> AggregateMetric:
104+
"""Create a libc++ AggregateMetric from a list of libc++ JobMetrics.
105+
106+
How aggregates are computed:
107+
queue time: Time from when first job in group is created until last job in
108+
group has started.
109+
run time: Time from when first job in group starts running until last job
110+
in group finishes running.
111+
status: logical 'and' of all the job statuses in the group.
106112
107113
Args:
108-
workflow_metrics: A list of JobMetrics entries collected so far.
114+
ag_name: The name for this particular AggregateMetric
115+
116+
job_list: This list of JobMetrics to be combined into the AggregateMetric.
117+
The input list should contain all (and only!) the libc++ JobMetrics
118+
for a particular stage and a particular workflow_id.
109119
110120
Returns:
111-
Returns a list of JobMetrics and AggregateMetric entries. It should
112-
be the input list with the newly create AggregateMetric entries
113-
appended to it.
121+
Returns the AggregateMetric constructed from the inputs.
122+
"""
123+
124+
# Initialize the aggregate values
125+
earliest_create = job_list[0].created_at_ns
126+
earliest_start = job_list[0].started_at_ns
127+
earliest_complete = job_list[0].completed_at_ns
128+
latest_start = job_list[0].started_at_ns
129+
latest_complete = job_list[0].completed_at_ns
130+
ag_status = job_list[0].status
131+
ag_workflow_id = job_list[0].workflow_id
132+
133+
# Go through rest of jobs for this workflow id, if any, updating stats
134+
if len(job_list) > 1:
135+
for job in job_list[1:]:
136+
# Update the status
137+
ag_status = ag_status and job.status
138+
# Get the earliest & latest times
139+
if job.created_at_ns < earliest_create:
140+
earliest_create = job.created_at_ns
141+
if job.completed_at_ns < earliest_complete:
142+
earliest_complete = job.completed_at_ns
143+
if job.started_at_ns > latest_start:
144+
latest_start = job.started_at_ns
145+
if job.started_at_ns < earliest_start:
146+
earliest_start = job.started_at_ns
147+
if job.completed_at_ns > latest_complete:
148+
latest_complete = job.completed_at_ns
149+
150+
# Compute aggregate run time (in seconds, not ns)
151+
ag_run_time = (latest_complete - earliest_start) / 1000000000
152+
# Compute aggregate queue time (in seconds, not ns)
153+
ag_queue_time = (latest_start - earliest_create) / 1000000000
154+
# Construct the AggregateMetric from the computed values and return it.
155+
aggregate = AggregateMetric(ag_name, ag_queue_time, ag_run_time, ag_status,
156+
latest_complete, ag_workflow_id)
157+
return aggregate
158+
159+
def create_and_append_libcxx_aggregates(workflow_metrics: list[JobMetrics]):
160+
"""Find libc++ JobMetric entries and create aggregate metrics for them.
114161
115162
Sort the libc++ JobMetric entries by workflow id, and for each workflow
116-
id group them by stages. Create an aggreate metric for each stage for each
117-
unique workflow id. Append each aggregate metric to the workflow_metrics
118-
list.
163+
id group them by stages. Call _construct_aggregate to create an aggregate
164+
metric for each stage for each unique workflow id. Append each aggregate
165+
metric to the input workflow_metrics list.
119166
120-
How aggreates are computed:
121-
queue time: Time from when first job in group is created until last job in
122-
group has started.
123-
run time: Time from when first job in group starts running until last job
124-
in group finishes running.
125-
status: logical 'or' of all the job statuses in the group.
167+
Args:
168+
workflow_metrics: A list of JobMetrics entries collected so far.
126169
"""
127170
# Separate the jobs by workflow_id. Only look at JobMetrics entries.
128171
aggregate_data = dict()
@@ -154,58 +197,18 @@ def create_and_append_libcxx_aggregates(
154197
elif job.job_name.find('stage3') > 0:
155198
stage3_jobs.append(job)
156199

157-
for job_list in [ stage1_jobs, stage2_jobs, stage3_jobs]:
158-
if len(job_list) < 1:
159-
# No jobs in that stage this time around.
160-
continue
161-
162-
# Get the aggregate name.
163-
ag_name = "github_libcxx_premerge_checks_"
164-
if job_list[0].job_name.find('stage1') > 0:
165-
ag_name = ag_name + "stage1_aggregate"
166-
elif job_list[0].job_name.find('stage2') > 0:
167-
ag_name = ag_name + "stage2_aggregate"
168-
elif job_list[0].job_name.find('stage3') > 0:
169-
ag_name = ag_name + "stage3_aggregate"
170-
else:
171-
ag_name = ag_name + "unknown_aggregate"
172-
173-
# Initialize the rest of the aggregate values
174-
earliest_create = job_list[0].created_at_ns
175-
earliest_start = job_list[0].started_at_ns
176-
earliest_complete = job_list[0].completed_at_ns
177-
latest_start = job_list[0].started_at_ns
178-
latest_complete = job_list[0].completed_at_ns
179-
ag_status = job_list[0].status
180-
181-
# Go through rest of jobs for this workflow id, updating stats
182-
for job in job_list[1:]:
183-
# Update the status
184-
ag_status = ag_status and job.status
185-
# Get the earliest & latest times
186-
if job.created_at_ns < earliest_create:
187-
earliest_create = job.created_at_ns
188-
if job.completed_at_ns < earliest_complete:
189-
earliest_complete = job.completed_at_ns
190-
if job.started_at_ns > latest_start:
191-
latest_start = job.started_at_ns
192-
if job.started_at_ns < earliest_start:
193-
earliest_start = job.started_at_ns
194-
if job.completed_at_ns > latest_complete:
195-
latest_complete = job.completed_at_ns
196-
197-
# Compute aggregate run time (in seconds, not ns)
198-
ag_run_time = (latest_complete - earliest_start) / 1000000000
199-
# Compute aggregate queue time (in seconds, not ns)
200-
ag_queue_time = (latest_start - earliest_create) / 1000000000
201-
# Append the aggregate metrics to the workflow metrics list.
202-
workflow_metrics.append(
203-
AggregateMetric(
204-
ag_name, ag_queue_time, ag_run_time, ag_status,
205-
latest_complete, ag_workflow_id
206-
)
207-
)
208-
return
200+
if len(stage1_jobs) > 0:
201+
aggregate = _construct_aggregate(
202+
"github_libcxx_premerge_checks_stage1_aggregate", stage1_jobs)
203+
workflow_metrics.append(aggregate)
204+
if len(stage2_jobs) > 0:
205+
aggregate = _construct_aggregate(
206+
"github_libcxx_premerge_checks_stage2_aggregate", stage2_jobs)
207+
workflow_metrics.append(aggregate)
208+
if len(stage3_jobs) > 0:
209+
aggregate = _construct_aggregate(
210+
"github_libcxx_premerge_checks_stage3_aggregate", stage3_jobs)
211+
workflow_metrics.append(aggregate)
209212

210213
def clean_up_libcxx_job_name(old_name: str) -> str:
211214
"""Convert libcxx job names to generically legal strings.

0 commit comments

Comments
 (0)