Support more than one statistic for summarizing time lists #127

Merged · 5 commits · Oct 16, 2023

36 changes: 18 additions & 18 deletions issue_metrics.py
@@ -32,14 +32,14 @@
 from classes import IssueWithMetrics
 from discussions import get_discussions
 from json_writer import write_to_json
-from labels import get_average_time_in_labels, get_label_metrics
+from labels import get_stats_time_in_labels, get_label_metrics
 from markdown_writer import write_to_markdown
-from time_to_answer import get_average_time_to_answer, measure_time_to_answer
-from time_to_close import get_average_time_to_close, measure_time_to_close
+from time_to_answer import get_stats_time_to_answer, measure_time_to_answer
+from time_to_close import get_stats_time_to_close, measure_time_to_close
 from time_to_ready_for_review import get_time_to_ready_for_review
 from time_to_merge import measure_time_to_merge
 from time_to_first_response import (
-    get_average_time_to_first_response,
+    get_stats_time_to_first_response,
     measure_time_to_first_response,
 )

@@ -317,36 +317,36 @@ def main():
         ignore_users=ignore_users,
     )

-    average_time_to_first_response = get_average_time_to_first_response(
+    stats_time_to_first_response = get_stats_time_to_first_response(
         issues_with_metrics
     )
-    average_time_to_close = None
+    stats_time_to_close = None
     if num_issues_closed > 0:
-        average_time_to_close = get_average_time_to_close(issues_with_metrics)
+        stats_time_to_close = get_stats_time_to_close(issues_with_metrics)

-    average_time_to_answer = get_average_time_to_answer(issues_with_metrics)
+    stats_time_to_answer = get_stats_time_to_answer(issues_with_metrics)

-    # Get the average time in label for each label and store it in a dictionary
+    # Get stats describing the time in label for each label and store it in a dictionary
     # where the key is the label and the value is the average time
-    average_time_in_labels = get_average_time_in_labels(issues_with_metrics, labels)
+    stats_time_in_labels = get_stats_time_in_labels(issues_with_metrics, labels)

     # Write the results to json and a markdown file
     write_to_json(
         issues_with_metrics,
-        average_time_to_first_response,
-        average_time_to_close,
-        average_time_to_answer,
-        average_time_in_labels,
+        stats_time_to_first_response,
+        stats_time_to_close,
+        stats_time_to_answer,
+        stats_time_in_labels,
         num_issues_open,
         num_issues_closed,
         search_query,
     )
     write_to_markdown(
         issues_with_metrics,
-        average_time_to_first_response,
-        average_time_to_close,
-        average_time_to_answer,
-        average_time_in_labels,
+        stats_time_to_first_response,
+        stats_time_to_close,
+        stats_time_to_answer,
+        stats_time_in_labels,
         num_issues_open,
         num_issues_closed,
         labels,
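
For context, a minimal sketch of the value shape the renamed get_stats_* helpers now pass to write_to_json and write_to_markdown. The shape is inferred from how write_overall_metrics_table indexes these values further down in this pull request; the sample numbers are invented.

    # Hypothetical stats value after this change (not part of the diff)
    from datetime import timedelta

    stats_time_to_first_response = {
        "avg": timedelta(hours=8),  # mean across all measured items
        "med": timedelta(hours=5),  # median
        "90p": timedelta(days=2),   # 90th percentile
    }
    # stats_time_to_close and stats_time_to_answer follow the same pattern,
    # or stay None when there is nothing to measure.
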
38 changes: 23 additions & 15 deletions labels.py
@@ -3,6 +3,7 @@
 from typing import List

 import github3
+import numpy
 import pytz

 from classes import IssueWithMetrics
@@ -88,32 +89,39 @@ def get_label_metrics(issue: github3.issues.Issue, labels: List[str]) -> dict:
     return label_metrics


-def get_average_time_in_labels(
+def get_stats_time_in_labels(
     issues_with_metrics: List[IssueWithMetrics],
     labels: List[str],
 ) -> dict[str, timedelta]:
-    """Calculate the average time spent in each label."""
-    average_time_in_labels = {}
-    number_of_issues_in_labels = {}
+    """Calculate stats describing time spent in each label."""
+    time_in_labels = {}
     for issue in issues_with_metrics:
         if issue.label_metrics:
             for label in issue.label_metrics:
                 if issue.label_metrics[label] is None:
                     continue
-                if label not in average_time_in_labels:
-                    average_time_in_labels[label] = issue.label_metrics[label]
-                    number_of_issues_in_labels[label] = 1
+                if label not in time_in_labels:
+                    time_in_labels[label] = [issue.label_metrics[label]]
                 else:
-                    average_time_in_labels[label] += issue.label_metrics[label]
-                    number_of_issues_in_labels[label] += 1
+                    time_in_labels[label].append(issue.label_metrics[label])

-    for label in average_time_in_labels:
-        average_time_in_labels[label] = (
-            average_time_in_labels[label] / number_of_issues_in_labels[label]
-        )
+    average_time_in_labels = {}
+    med_time_in_labels = {}
+    ninety_percentile_in_labels = {}
+    for label, time_list in time_in_labels.items():
+        average_time_in_labels[label] = numpy.average(time_list)
+        med_time_in_labels[label] = numpy.median(time_list)
+        ninety_percentile_in_labels[label] = numpy.percentile(time_list, 90, axis=0)

     for label in labels:
         if label not in average_time_in_labels:
             average_time_in_labels[label] = None
-
-    return average_time_in_labels
+            med_time_in_labels[label] = None
+            ninety_percentile_in_labels[label] = None
+
+    stats = {
+        'avg': average_time_in_labels,
+        'med': med_time_in_labels,
+        '90p': ninety_percentile_in_labels
+    }
+    return stats
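
The rewritten helper leans on numpy accepting plain datetime.timedelta objects. A minimal standalone sketch of the three calls it makes, assuming numpy 1.26 as pinned in requirements.txt; the durations are invented:

    from datetime import timedelta

    import numpy

    # Hypothetical times one label was applied across three issues
    time_list = [timedelta(days=1), timedelta(days=2), timedelta(days=7)]

    print(numpy.average(time_list))                 # mean: 3 days, 8:00:00
    print(numpy.median(time_list))                  # median: 2 days, 0:00:00
    print(numpy.percentile(time_list, 90, axis=0))  # 90th percentile: 6 days, 0:00:00 (linear interpolation)
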
53 changes: 39 additions & 14 deletions markdown_writer.py
@@ -170,32 +170,57 @@ def write_to_markdown(

 def write_overall_metrics_table(
     issues_with_metrics,
-    average_time_to_first_response,
-    average_time_to_close,
-    average_time_to_answer,
-    average_time_in_labels,
+    stats_time_to_first_response,
+    stats_time_to_close,
+    stats_time_to_answer,
+    stats_time_in_labels,
     num_issues_opened,
     num_issues_closed,
     labels,
     columns,
     file,
 ):
     """Write the overall metrics table to the markdown file."""
-    file.write("| Metric | Value |\n")
-    file.write("| --- | ---: |\n")
+    file.write("| Metric | Average | Median | 90th percentile |\n")
+    file.write("| --- | --- | --- | ---: |\n")
     if "Time to first response" in columns:
-        file.write(
-            f"| Average time to first response | {average_time_to_first_response} |\n"
-        )
+        if stats_time_to_first_response is not None:
+            file.write(
+                f"| Time to first response "
+                f"| {stats_time_to_first_response['avg']} "
+                f"| {stats_time_to_first_response['med']} "
+                f"| {stats_time_to_first_response['90p']} |\n"
+            )
+        else:
+            file.write("| Time to first response | None | None | None |\n")
     if "Time to close" in columns:
-        file.write(f"| Average time to close | {average_time_to_close} |\n")
+        if stats_time_to_close is not None:
+            file.write(
+                f"| Time to close "
+                f"| {stats_time_to_close['avg']} "
+                f"| {stats_time_to_close['med']} "
+                f"| {stats_time_to_close['90p']} |\n"
+            )
+        else:
+            file.write("| Time to close | None | None | None |\n")
     if "Time to answer" in columns:
-        file.write(f"| Average time to answer | {average_time_to_answer} |\n")
-    if labels and average_time_in_labels:
+        if stats_time_to_answer is not None:
+            file.write(
+                f"| Time to answer "
+                f"| {stats_time_to_answer['avg']} "
+                f"| {stats_time_to_answer['med']} "
+                f"| {stats_time_to_answer['90p']} |\n"
+            )
+        else:
+            file.write("| Time to answer | None | None | None |\n")
+    if labels and stats_time_in_labels:
         for label in labels:
-            if f"Time spent in {label}" in columns and label in average_time_in_labels:
+            if f"Time spent in {label}" in columns and label in stats_time_in_labels['avg']:
                 file.write(
-                    f"| Average time spent in {label} | {average_time_in_labels[label]} |\n"
+                    f"| Time spent in {label} "
+                    f"| {stats_time_in_labels['avg'][label]} "
+                    f"| {stats_time_in_labels['med'][label]} "
+                    f"| {stats_time_in_labels['90p'][label]} |\n"
                 )
     file.write(f"| Number of items that remain open | {num_issues_opened} |\n")
     file.write(f"| Number of items closed | {num_issues_closed} |\n")
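
To illustrate the effect of this change on the rendered report, the overall metrics table now carries three value columns instead of one. A sample with invented values:

    | Metric | Average | Median | 90th percentile |
    | --- | --- | --- | ---: |
    | Time to first response | 0:50:44 | 0:30:12 | 1:52:09 |
    | Time to close | 3 days, 4:12:08 | 2 days, 22:51:30 | 6 days, 7:08:52 |
    | Time to answer | None | None | None |
    | Time spent in bug | 2 days, 0:00:00 | 2 days, 0:00:00 | 2 days, 0:00:00 |
    | Number of items that remain open | 2 |
    | Number of items closed | 1 |

The two count rows keep a single value column because their write calls are unchanged above.
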
1 change: 1 addition & 0 deletions requirements.txt
@@ -6,3 +6,4 @@ pytest==7.4.2
 pytest-cov==4.1.0
 flake8==6.1.0
 pylint==3.0.1
+numpy==1.26.0
12 changes: 6 additions & 6 deletions test_issue_metrics.py
@@ -133,7 +133,7 @@ class TestMain(unittest.TestCase):
     @patch("issue_metrics.auth_to_github")
     @patch("issue_metrics.search_issues")
     @patch("issue_metrics.measure_time_to_first_response")
-    @patch("issue_metrics.get_average_time_to_first_response")
+    @patch("issue_metrics.get_stats_time_to_first_response")
     @patch.dict(
         os.environ,
         {
@@ -142,7 +142,7 @@
     )
     def test_main(
         self,
-        mock_get_average_time_to_first_response,
+        mock_get_stats_time_to_first_response,
         mock_measure_time_to_first_response,
         mock_search_issues,
         mock_auth_to_github,
@@ -179,10 +179,10 @@ def test_main(
         ]
         mock_measure_time_to_first_response.return_value = mock_issues_with_ttfr

-        # Set up the mock get_average_time_to_first_response function
-        mock_average_time_to_first_response = 15
-        mock_get_average_time_to_first_response.return_value = (
-            mock_average_time_to_first_response
+        # Set up the mock get_stats_time_to_first_response function
+        mock_stats_time_to_first_response = 15
+        mock_get_stats_time_to_first_response.return_value = (
+            mock_stats_time_to_first_response
         )

         # Call main and check that it runs without errors
17 changes: 9 additions & 8 deletions test_labels.py
@@ -7,7 +7,7 @@
 import pytz
 from classes import IssueWithMetrics

-from labels import get_average_time_in_labels, get_label_events, get_label_metrics
+from labels import get_stats_time_in_labels, get_label_events, get_label_metrics


 class TestLabels(unittest.TestCase):
@@ -69,7 +69,7 @@ def test_get_label_metrics_open_issue(self):


 class TestGetAverageTimeInLabels(unittest.TestCase):
-    """Unit tests for get_average_time_in_labels"""
+    """Unit tests for get_stats_time_in_labels"""

     def setUp(self):
         self.issues_with_metrics = MagicMock()
@@ -79,13 +79,14 @@ def setUp(self):
             ),
         ]

-    def test_get_average_time_in_labels(self):
-        """Test get_average_time_in_labels"""
+    def test_get_stats_time_in_labels(self):
+        """Test get_stats_time_in_labels"""
         labels = ["bug", "feature"]
-        metrics = get_average_time_in_labels(self.issues_with_metrics, labels)
-        self.assertEqual(len(metrics), 2)
-        self.assertEqual(metrics["bug"], timedelta(days=2))
-        self.assertIsNone(metrics.get("feature"))
+        metrics = get_stats_time_in_labels(self.issues_with_metrics, labels)
+        print(metrics)
+        self.assertEqual(len(metrics['avg']), 2)
+        self.assertEqual(metrics['avg']["bug"], timedelta(days=2))
+        self.assertIsNone(metrics['avg'].get("feature"))


 if __name__ == "__main__":
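
For reference, assuming the collapsed setUp provides a single mocked issue that spent two days in the "bug" label (consistent with the assertions), the value printed by print(metrics) should look roughly like this:

    {
        'avg': {'bug': datetime.timedelta(days=2), 'feature': None},
        'med': {'bug': datetime.timedelta(days=2), 'feature': None},
        '90p': {'bug': datetime.timedelta(days=2), 'feature': None},
    }

The 'feature' entries are None because no issue carried that label, which is what the final assertIsNone checks.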