feat: add time in draft tracking to main and markdown_writer functions and tests
zkoppert committed Oct 23, 2024
1 parent 4b6b4bc commit c035f14
Showing 3 changed files with 77 additions and 21 deletions.
3 changes: 3 additions & 0 deletions issue_metrics.py
@@ -244,6 +244,7 @@ def main(): # pragma: no cover
average_time_to_first_response=None,
average_time_to_close=None,
average_time_to_answer=None,
average_time_in_draft=None,
average_time_in_labels=None,
num_issues_opened=None,
num_issues_closed=None,
@@ -268,6 +269,7 @@ def main(): # pragma: no cover
average_time_to_first_response=None,
average_time_to_close=None,
average_time_to_answer=None,
average_time_in_draft=None,
average_time_in_labels=None,
num_issues_opened=None,
num_issues_closed=None,
@@ -329,6 +331,7 @@ def main(): # pragma: no cover
average_time_to_first_response=stats_time_to_first_response,
average_time_to_close=stats_time_to_close,
average_time_to_answer=stats_time_to_answer,
average_time_in_draft=stats_time_in_draft,
average_time_in_labels=stats_time_in_labels,
num_issues_opened=num_issues_open,
num_issues_closed=num_issues_closed,
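For context on the new value main() now forwards: stats_time_in_draft uses the same avg/med/90p dictionary shape as the other time metrics (the tests further down build exactly that shape). A minimal sketch of producing such a dictionary from per-item draft durations; the helper name and the nearest-rank percentile shortcut are assumptions for illustration, not code from this commit.

```python
from datetime import timedelta
from statistics import median
from typing import List, Optional


def summarize_draft_times(draft_times: List[timedelta]) -> Optional[dict]:
    """Hypothetical helper: collapse per-PR draft durations into avg/med/90p."""
    if not draft_times:
        # mirrors main(), which passes average_time_in_draft=None when there is nothing to report
        return None
    ordered = sorted(draft_times)
    average = sum(ordered, timedelta(0)) / len(ordered)
    # crude nearest-rank 90th percentile over the sorted durations
    ninetieth = ordered[min(len(ordered) - 1, int(0.9 * len(ordered)))]
    return {"avg": average, "med": median(ordered), "90p": ninetieth}


print(summarize_draft_times([timedelta(hours=12), timedelta(days=1), timedelta(days=2)]))
```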
50 changes: 39 additions & 11 deletions markdown_writer.py
@@ -71,6 +71,10 @@ def get_non_hidden_columns(labels) -> List[str]:
if not hide_time_to_answer:
columns.append("Time to answer")

enable_time_in_draft = env_vars.draft_pr_tracking
if enable_time_in_draft:
columns.append("Time in draft")

hide_label_metrics = env_vars.hide_label_metrics
if not hide_label_metrics and labels:
for label in labels:
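The new column is gated on env_vars.draft_pr_tracking, and the tests below switch it on by setting DRAFT_PR_TRACKING in the environment. A rough sketch of the mapping that relies on, assuming a simple truthy-string parse; the real parsing lives in the project's environment handling, which this diff does not touch.

```python
import os

# Assumed mapping only: DRAFT_PR_TRACKING="True" -> env_vars.draft_pr_tracking == True
draft_pr_tracking = os.environ.get("DRAFT_PR_TRACKING", "").strip().lower() == "true"
```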
@@ -84,6 +88,7 @@ def write_to_markdown(
average_time_to_first_response: Union[dict[str, timedelta], None],
average_time_to_close: Union[dict[str, timedelta], None],
average_time_to_answer: Union[dict[str, timedelta], None],
average_time_in_draft: Union[dict[str, timedelta], None],
average_time_in_labels: Union[dict, None],
num_issues_opened: Union[int, None],
num_issues_closed: Union[int, None],
@@ -104,6 +109,7 @@ def write_to_markdown(
response for the issues.
average_time_to_close (datetime.timedelta): The average time to close for the issues.
average_time_to_answer (datetime.timedelta): The average time to answer the discussions.
average_time_in_draft (datetime.timedelta): The average time spent in draft for the issues.
average_time_in_labels (dict): A dictionary containing the average time spent in each label.
file (file object, optional): The file object to write to. If not provided,
a file named "issue_metrics.md" will be created.
@@ -112,9 +118,12 @@ def write_to_markdown(
num_mentor_count (int): The number of very active commentors.
labels (List[str]): A list of the labels that are used in the issues.
search_query (str): The search query used to find the issues.
hide_label_metrics (bool): Represents whether the user has chosen to hide label metrics in the output
hide_items_closed_count (bool): Represents whether the user has chosen to hide the number of items closed
non_mentioning_links (bool): Represents whether links do not cause a notification in the desitnation repository
hide_label_metrics (bool): Represents whether the user has chosen to hide label
metrics in the output
hide_items_closed_count (bool): Represents whether the user has chosen to hide
the number of items closed
non_mentioning_links (bool): Represents whether links do not cause a notification
in the destination repository
report_title (str): The title of the report
output_file (str): The name of the file to write the report to
@@ -131,7 +140,8 @@ def write_to_markdown(
if not issues_with_metrics or len(issues_with_metrics) == 0:
file.write("no issues found for the given search criteria\n\n")
file.write(
"\n_This report was generated with the [Issue Metrics Action](https://github.com/github/issue-metrics)_\n"
"\n_This report was generated with the \
[Issue Metrics Action](https://github.com/github/issue-metrics)_\n"
)
if search_query:
file.write(f"Search query used to find these items: `{search_query}`\n")
@@ -143,6 +153,7 @@ def write_to_markdown(
average_time_to_first_response,
average_time_to_close,
average_time_to_answer,
average_time_in_draft,
average_time_in_labels,
num_issues_opened,
num_issues_closed,
@@ -189,13 +200,16 @@ def write_to_markdown(
file.write(f" {issue.time_to_close} |")
if "Time to answer" in columns:
file.write(f" {issue.time_to_answer} |")
if "Time in draft" in columns:
file.write(f" {issue.time_in_draft} |")
if labels and issue.label_metrics:
for label in labels:
if f"Time spent in {label}" in columns:
file.write(f" {issue.label_metrics[label]} |")
file.write("\n")
file.write(
"\n_This report was generated with the [Issue Metrics Action](https://github.com/github/issue-metrics)_\n"
"\n_This report was generated with the \
[Issue Metrics Action](https://github.com/github/issue-metrics)_\n"
)
if search_query:
file.write(f"Search query used to find these items: `{search_query}`\n")
@@ -208,6 +222,7 @@ def write_overall_metrics_tables(
stats_time_to_first_response,
stats_time_to_close,
stats_time_to_answer,
average_time_in_draft,
stats_time_in_labels,
num_issues_opened,
num_issues_closed,
@@ -219,12 +234,15 @@ def write_overall_metrics_tables(
hide_items_closed_count=False,
):
"""Write the overall metrics tables to the markdown file."""
if (
"Time to first response" in columns
or "Time to close" in columns
or "Time to answer" in columns
or (hide_label_metrics is False and len(labels) > 0)
):
if any(
column in columns
for column in [
"Time to first response",
"Time to close",
"Time to answer",
"Time in draft",
]
) or (hide_label_metrics is False and len(labels) > 0):
file.write("| Metric | Average | Median | 90th percentile |\n")
file.write("| --- | --- | --- | ---: |\n")
if "Time to first response" in columns:
@@ -257,6 +275,16 @@ def write_overall_metrics_tables(
)
else:
file.write("| Time to answer | None | None | None |\n")
if "Time in draft" in columns:
if average_time_in_draft is not None:
file.write(
f"| Time in draft "
f"| {average_time_in_draft['avg']} "
f"| {average_time_in_draft['med']} "
f"| {average_time_in_draft['90p']} |\n"
)
else:
file.write("| Time in draft | None | None | None |\n")
if labels and stats_time_in_labels:
for label in labels:
if (
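The column plumbing above follows a single pattern: one columns list, built by get_non_hidden_columns(), gates the table header, the separator row, the per-item cells, and the overall metrics rows, so appending "Time in draft" to that list is enough to light the column up end to end. A self-contained sketch of that pattern with made-up names and values; nothing here is imported from the project.

```python
from datetime import timedelta

# Illustrative only: the same list drives the header, the separator, and each row cell.
columns = ["Title", "Time to close", "Time in draft"]
row = {
    "Title": "Example PR",
    "Time to close": timedelta(days=2),
    "Time in draft": timedelta(hours=6),
}

lines = [
    "| " + " | ".join(columns) + " |",
    "| " + " | ".join("---" for _ in columns) + " |",
    "| " + " | ".join(str(row[c]) for c in columns) + " |",
]
print("\n".join(lines))
```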
45 changes: 35 additions & 10 deletions test_markdown_writer.py
@@ -18,7 +18,11 @@

@patch.dict(
os.environ,
{"SEARCH_QUERY": "is:open repo:user/repo", "GH_TOKEN": "test_token"},
{
"SEARCH_QUERY": "is:open repo:user/repo",
"GH_TOKEN": "test_token",
"DRAFT_PR_TRACKING": "True",
},
)
class TestWriteToMarkdown(unittest.TestCase):
"""Test the write_to_markdown function."""
@@ -43,7 +47,8 @@ def test_write_to_markdown(self):
time_to_first_response=timedelta(days=1),
time_to_close=timedelta(days=2),
time_to_answer=timedelta(days=3),
labels_metrics={"bug": timedelta(days=1)},
time_in_draft=timedelta(days=1),
labels_metrics={"bug": timedelta(days=4)},
),
IssueWithMetrics(
title="Issue 2\r",
Expand All @@ -52,6 +57,7 @@ def test_write_to_markdown(self):
time_to_first_response=timedelta(days=3),
time_to_close=timedelta(days=4),
time_to_answer=timedelta(days=5),
time_in_draft=timedelta(days=1),
labels_metrics={"bug": timedelta(days=2)},
),
]
@@ -70,6 +76,11 @@ def test_write_to_markdown(self):
"med": timedelta(days=4),
"90p": timedelta(days=4),
}
time_in_draft = {
"avg": timedelta(days=1),
"med": timedelta(days=1),
"90p": timedelta(days=1),
}
time_in_labels = {
"avg": {"bug": "1 day, 12:00:00"},
"med": {"bug": "1 day, 12:00:00"},
@@ -86,6 +97,7 @@ def test_write_to_markdown(self):
average_time_to_first_response=time_to_first_response,
average_time_to_close=time_to_close,
average_time_to_answer=time_to_answer,
average_time_in_draft=time_in_draft,
average_time_in_labels=time_in_labels,
num_issues_opened=num_issues_opened,
num_issues_closed=num_issues_closed,
@@ -106,6 +118,7 @@ def test_write_to_markdown(self):
"| Time to first response | 2 days, 0:00:00 | 2 days, 0:00:00 | 2 days, 0:00:00 |\n"
"| Time to close | 3 days, 0:00:00 | 3 days, 0:00:00 | 3 days, 0:00:00 |\n"
"| Time to answer | 4 days, 0:00:00 | 4 days, 0:00:00 | 4 days, 0:00:00 |\n"
"| Time in draft | 1 day, 0:00:00 | 1 day, 0:00:00 | 1 day, 0:00:00 |\n"
"| Time spent in bug | 1 day, 12:00:00 | 1 day, 12:00:00 | 1 day, 12:00:00 |\n"
"\n"
"| Metric | Count |\n"
@@ -115,12 +128,12 @@ def test_write_to_markdown(self):
"| Number of most active mentors | 5 |\n"
"| Total number of items created | 2 |\n\n"
"| Title | URL | Author | Time to first response | Time to close |"
" Time to answer | Time spent in bug |\n"
"| --- | --- | --- | --- | --- | --- | --- |\n"
" Time to answer | Time in draft | Time spent in bug |\n"
"| --- | --- | --- | --- | --- | --- | --- | --- |\n"
"| Issue 1 | https://github.com/user/repo/issues/1 | [alice](https://github.com/alice) | 1 day, 0:00:00 | "
"2 days, 0:00:00 | 3 days, 0:00:00 | 1 day, 0:00:00 |\n"
"2 days, 0:00:00 | 3 days, 0:00:00 | 1 day, 0:00:00 | 4 days, 0:00:00 |\n"
"| Issue 2 | https://github.com/user/repo/issues/2 | [bob](https://github.com/bob) | 3 days, 0:00:00 | "
"4 days, 0:00:00 | 5 days, 0:00:00 | 2 days, 0:00:00 |\n\n"
"4 days, 0:00:00 | 5 days, 0:00:00 | 1 day, 0:00:00 | 2 days, 0:00:00 |\n\n"
"_This report was generated with the [Issue Metrics Action](https://github.com/github/issue-metrics)_\n"
"Search query used to find these items: `is:issue is:open label:bug`\n"
)
@@ -145,6 +158,7 @@ def test_write_to_markdown_with_vertical_bar_in_title(self):
time_to_first_response=timedelta(days=1),
time_to_close=timedelta(days=2),
time_to_answer=timedelta(days=3),
time_in_draft=timedelta(days=1),
labels_metrics={"bug": timedelta(days=1)},
),
IssueWithMetrics(
@@ -172,6 +186,11 @@ def test_write_to_markdown_with_vertical_bar_in_title(self):
"med": timedelta(days=4),
"90p": timedelta(days=4),
}
average_time_in_draft = {
"avg": timedelta(days=1),
"med": timedelta(days=1),
"90p": timedelta(days=1),
}
average_time_in_labels = {
"avg": {"bug": "1 day, 12:00:00"},
"med": {"bug": "1 day, 12:00:00"},
@@ -188,6 +207,7 @@ def test_write_to_markdown_with_vertical_bar_in_title(self):
average_time_to_first_response=average_time_to_first_response,
average_time_to_close=average_time_to_close,
average_time_to_answer=average_time_to_answer,
average_time_in_draft=average_time_in_draft,
average_time_in_labels=average_time_in_labels,
num_issues_opened=num_issues_opened,
num_issues_closed=num_issues_closed,
@@ -207,6 +227,7 @@ def test_write_to_markdown_with_vertical_bar_in_title(self):
"| Time to first response | 2 days, 0:00:00 | 2 days, 0:00:00 | 2 days, 0:00:00 |\n"
"| Time to close | 3 days, 0:00:00 | 3 days, 0:00:00 | 3 days, 0:00:00 |\n"
"| Time to answer | 4 days, 0:00:00 | 4 days, 0:00:00 | 4 days, 0:00:00 |\n"
"| Time in draft | 1 day, 0:00:00 | 1 day, 0:00:00 | 1 day, 0:00:00 |\n"
"| Time spent in bug | 1 day, 12:00:00 | 1 day, 12:00:00 | 1 day, 12:00:00 |\n"
"\n"
"| Metric | Count |\n"
@@ -216,12 +237,12 @@ def test_write_to_markdown_with_vertical_bar_in_title(self):
"| Number of most active mentors | 5 |\n"
"| Total number of items created | 2 |\n\n"
"| Title | URL | Author | Time to first response | Time to close |"
" Time to answer | Time spent in bug |\n"
"| --- | --- | --- | --- | --- | --- | --- |\n"
" Time to answer | Time in draft | Time spent in bug |\n"
"| --- | --- | --- | --- | --- | --- | --- | --- |\n"
"| Issue 1 | https://github.com/user/repo/issues/1 | [alice](https://github.com/alice) | 1 day, 0:00:00 | "
"2 days, 0:00:00 | 3 days, 0:00:00 | 1 day, 0:00:00 |\n"
"2 days, 0:00:00 | 3 days, 0:00:00 | 1 day, 0:00:00 | 1 day, 0:00:00 |\n"
"| feat| Issue 2 | https://github.com/user/repo/issues/2 | [bob](https://github.com/bob) | 3 days, 0:00:00 | "
"4 days, 0:00:00 | 5 days, 0:00:00 | 2 days, 0:00:00 |\n\n"
"4 days, 0:00:00 | 5 days, 0:00:00 | None | 2 days, 0:00:00 |\n\n"
"_This report was generated with the [Issue Metrics Action](https://github.com/github/issue-metrics)_\n"
)
self.assertEqual(content, expected_content)
Expand All @@ -240,6 +261,7 @@ def test_write_to_markdown_no_issues(self):
None,
None,
None,
None,
report_title="Issue Metrics",
)

@@ -289,6 +311,7 @@ def test_writes_markdown_file_with_non_hidden_columns_only(self):
time_to_first_response=timedelta(minutes=10),
time_to_close=timedelta(days=1),
time_to_answer=timedelta(hours=2),
time_in_draft=timedelta(days=1),
labels_metrics={
"label1": timedelta(days=1),
},
@@ -308,6 +331,7 @@ def test_writes_markdown_file_with_non_hidden_columns_only(self):
average_time_to_first_response = timedelta(minutes=15)
average_time_to_close = timedelta(days=1.5)
average_time_to_answer = timedelta(hours=3)
average_time_in_draft = timedelta(days=1)
average_time_in_labels = {
"label1": timedelta(days=1),
}
@@ -322,6 +346,7 @@ def test_writes_markdown_file_with_non_hidden_columns_only(self):
average_time_to_close=average_time_to_close,
average_time_to_answer=average_time_to_answer,
average_time_in_labels=average_time_in_labels,
average_time_in_draft=average_time_in_draft,
num_issues_opened=num_issues_opened,
num_issues_closed=num_issues_closed,
num_mentor_count=num_mentor_count,
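The updated expectations can be exercised on their own. One way to run just this module, assuming the project's dependencies are installed (equivalent to python -m unittest test_markdown_writer):

```python
import unittest

# Runs only the markdown writer tests; exit=False keeps the interpreter alive
# so this also works from a REPL or another script.
unittest.main(module="test_markdown_writer", exit=False)
```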
