WIP: First working implementation of a customizable REPORT_TITLE
spier committed Oct 29, 2023
1 parent 9291773 commit 1693462
Showing 4 changed files with 138 additions and 5 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -111,6 +111,7 @@ Below are the allowed configuration options:
| `GH_TOKEN` | True | | The GitHub Token used to scan the repository. Must have read access to all repositories you are interested in scanning. |
| `SEARCH_QUERY` | True | | The query by which you can filter issues/PRs, which must contain a `repo:`, `org:`, `owner:`, or `user:` entry. For discussions, include `type:discussions` in the query. |
| `LABELS_TO_MEASURE` | False | | A comma-separated list of labels for which to measure how long each label is applied. If not provided, no label durations will be measured. Not compatible with discussions at this time. |
| `REPORT_TITLE` | False | "Issue Metrics" | A custom title that will be printed at the very top of the report. _Note:_ Most useful when combining multiple reports into a single GitHub issue; see the sketch after this table. |
| `HIDE_AUTHOR` | False | | If set to any value, the author will not be displayed in the generated markdown file. |
| `HIDE_TIME_TO_FIRST_RESPONSE` | False | | If set to any value, the time to first response will not be displayed in the generated markdown file. |
| `HIDE_TIME_TO_CLOSE` | False | | If set to any value, the time to close will not be displayed in the generated markdown file. |
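
A minimal sketch of the "combining multiple reports" use case that the `REPORT_TITLE` note refers to, assuming two hypothetical report files produced by separate runs of this Action (the file names are invented for illustration):

```python
# Hypothetical sketch: each Action run is assumed to have written its own
# report with a distinct REPORT_TITLE, so the sections stay distinguishable
# once they are concatenated into a single issue body.
reports = ["bug_metrics.md", "feature_metrics.md"]

sections = []
for path in reports:
    with open(path, "r", encoding="utf-8") as f:
        sections.append(f.read())

# The combined text could then be posted as the body of one GitHub issue.
combined = "\n\n---\n\n".join(sections)
```
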
9 changes: 8 additions & 1 deletion issue_metrics.py
@@ -52,6 +52,7 @@ def get_env_vars() -> tuple[str, str, List[str]]:
str: the search query used to filter issues, prs, and discussions
str: the github token used to authenticate to github.com
List[str]: a list of users to ignore when calculating metrics
str: the title to use at the very top of the markdown output
"""
search_query = os.getenv("SEARCH_QUERY")
if not search_query:
@@ -67,7 +68,11 @@ def get_env_vars() -> tuple[str, str, List[str]]:
else:
ignore_users = []

return search_query, token, ignore_users
report_title = os.getenv("REPORT_TITLE")
if not report_title:
report_title = "Issue Metrics"

return search_query, token, ignore_users, report_title


def search_issues(
@@ -272,6 +277,7 @@ def main():
search_query = env_vars[0]
token = env_vars[1]
ignore_users = env_vars[2]
report_title = env_vars[3]

# Get the repository owner and name from the search query
owner = get_owner(search_query)
@@ -351,6 +357,7 @@
num_issues_closed,
labels,
search_query,
report_title,
)
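
Taken together, the two hunks above thread the new value end to end: `get_env_vars()` now reads `REPORT_TITLE` (defaulting to `"Issue Metrics"`) and returns it as a fourth tuple element, and `main()` unpacks it and passes it to `write_to_markdown()`. (The `tuple[str, str, List[str]]` return annotation has not been updated yet, consistent with the WIP label.) A standalone sketch of the fallback behavior, with an invented helper name for illustration:

```python
import os

def resolve_report_title() -> str:
    # Mirrors the fallback in get_env_vars() above: an unset or empty
    # REPORT_TITLE yields the historical default heading.
    return os.getenv("REPORT_TITLE") or "Issue Metrics"

os.environ.pop("REPORT_TITLE", None)
assert resolve_report_title() == "Issue Metrics"

os.environ["REPORT_TITLE"] = "My Custom Report Title"
assert resolve_report_title() == "My Custom Report Title"
```

Note that `os.getenv("REPORT_TITLE") or ...` treats an empty string the same as an unset variable, matching the `if not report_title` check in the hunk above.
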


8 changes: 5 additions & 3 deletions markdown_writer.py
@@ -78,6 +78,7 @@ def write_to_markdown(
num_issues_closed: Union[int, None],
labels=None,
search_query=None,
report_title=None,
hide_label_metrics=False,
) -> None:
"""Write the issues with metrics to a markdown file.
@@ -91,19 +92,20 @@ def write_to_markdown(
average_time_in_labels (dict): A dictionary containing the average time spent in each label.
file (file object, optional): The file object to write to. If not provided,
a file named "issue_metrics.md" will be created.
num_issues_opened (int): The Number of items that remain opened.
num_issues_opened (int): The number of items that remain opened.
num_issues_closed (int): The number of issues that were closed.
labels (List[str]): A list of the labels that are used in the issues.
search_query (str): The search query used to find the issues.
hide_label_metrics (bool): Represents whether the user has chosen to hide label metrics in the output
report_title (str): The title to use at the top of the markdown output.
hide_label_metrics (bool): Represents whether the user has chosen to hide label metrics in the output.
Returns:
None.
"""
columns = get_non_hidden_columns(labels)
with open("issue_metrics.md", "w", encoding="utf-8") as file:
file.write("# Issue Metrics\n\n")
file.write(f"# {report_title}\n\n")

# If all the metrics are None, then there are no issues
if not issues_with_metrics or len(issues_with_metrics) == 0:
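
One caveat in the new signature: `report_title` defaults to `None`, so a caller that omits the argument would render a literal `# None` heading. A hypothetical guard, not part of this commit, that would preserve the old default at the write site:

```python
import io

def write_title(file, report_title=None) -> None:
    # Hypothetical helper, not in this commit: fall back to the previous
    # hard-coded heading when report_title is omitted or empty.
    file.write(f"# {report_title or 'Issue Metrics'}\n\n")

buf = io.StringIO()
write_title(buf)                            # writes "# Issue Metrics\n\n"
write_title(buf, "My Custom Report Title")  # writes "# My Custom Report Title\n\n"
```
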
125 changes: 124 additions & 1 deletion test_markdown_writer.py
@@ -85,6 +85,7 @@ def test_write_to_markdown(self):
num_issues_closed=num_issues_closed,
labels=["bug"],
search_query="is:issue is:open label:bug",
report_title="Issue Metrics",
)

# Check that the function writes the correct markdown file
@@ -181,6 +182,7 @@ def test_write_to_markdown_with_vertical_bar_in_title(self):
num_issues_opened=num_issues_opened,
num_issues_closed=num_issues_closed,
labels=["bug"],
report_title="Issue Metrics",
)

# Check that the function writes the correct markdown file
@@ -216,7 +218,17 @@ def test_write_to_markdown_no_issues(self):
"""Test that write_to_markdown writes the correct markdown file when no issues are found."""
# Call the function with no issues
with patch("builtins.open", mock_open()) as mock_open_file:
write_to_markdown(None, None, None, None, None, None, None)
# write_to_markdown(None, None, None, None, None, None, None)
write_to_markdown(
issues_with_metrics=None,
average_time_to_first_response=None,
average_time_to_close=None,
average_time_to_answer=None,
average_time_in_labels=None,
num_issues_opened=None,
num_issues_closed=None,
report_title="Issue Metrics",
)

# Check that the file was written correctly
expected_output = [
@@ -303,6 +315,7 @@ def test_writes_markdown_file_with_non_hidden_columns_only(self):
labels=["label1"],
search_query="repo:user/repo is:issue",
hide_label_metrics=True,
report_title="Issue Metrics",
)

# Check that the function writes the correct markdown file
@@ -324,3 +337,113 @@ def test_writes_markdown_file_with_non_hidden_columns_only(self):
)
self.assertEqual(content, expected_content)
os.remove("issue_metrics.md")


class TestWriteToMarkdownWithOutputCustomization(unittest.TestCase):
"""Test the write_to_markdown function with the REPORT_TITLE environment variables set."""

def setUp(self):
os.environ["REPORT_TITLE"] = "My Custom Report Title"
# os.environ["HIDE_TIME_TO_CLOSE"] = "True"
# os.environ["HIDE_TIME_TO_ANSWER"] = "True"
# os.environ["HIDE_LABEL_METRICS"] = "True"

def tearDown(self):
# Unset the environment variables
os.environ.pop("REPORT_TITLE")
# os.environ.pop("HIDE_TIME_TO_CLOSE")
# os.environ.pop("HIDE_TIME_TO_ANSWER")
# os.environ.pop("HIDE_LABEL_METRICS")

def test_writes_markdown_file_with_output_customization(self):
"""
Test that write_to_markdown writes the correct
markdown file.
"""

# Create mock data
issues_with_metrics = [
IssueWithMetrics(
"Issue 1",
"https://github.com/user/repo/issues/1",
"alice",
timedelta(days=1),
timedelta(days=2),
timedelta(days=3),
{"bug": timedelta(days=1)},
),
IssueWithMetrics(
"feat| Issue 2", # title contains a vertical bar
"https://github.com/user/repo/issues/2",
"bob",
timedelta(days=3),
timedelta(days=4),
timedelta(days=5),
{"bug": timedelta(days=2)},
),
]
average_time_to_first_response = {
"avg": timedelta(days=2),
"med": timedelta(days=2),
"90p": timedelta(days=2),
}
average_time_to_close = {
"avg": timedelta(days=3),
"med": timedelta(days=3),
"90p": timedelta(days=3),
}
average_time_to_answer = {
"avg": timedelta(days=4),
"med": timedelta(days=4),
"90p": timedelta(days=4),
}
average_time_in_labels = {
"avg": {"bug": "1 day, 12:00:00"},
"med": {"bug": "1 day, 12:00:00"},
"90p": {"bug": "1 day, 12:00:00"},
}

num_issues_opened = 2
num_issues_closed = 1

# Call the function
write_to_markdown(
issues_with_metrics=issues_with_metrics,
average_time_to_first_response=average_time_to_first_response,
average_time_to_close=average_time_to_close,
average_time_to_answer=average_time_to_answer,
average_time_in_labels=average_time_in_labels,
num_issues_opened=num_issues_opened,
num_issues_closed=num_issues_closed,
labels=["bug"],
report_title="My Custom Report Title",
)

# Check that the function writes the correct markdown file
with open("issue_metrics.md", "r", encoding="utf-8") as file:
content = file.read()
expected_content = (
"# My Custom Report Title\n\n"
"| Metric | Average | Median | 90th percentile |\n"
"| --- | --- | --- | ---: |\n"
"| Time to first response | 2 days, 0:00:00 | 2 days, 0:00:00 | 2 days, 0:00:00 |\n"
"| Time to close | 3 days, 0:00:00 | 3 days, 0:00:00 | 3 days, 0:00:00 |\n"
"| Time to answer | 4 days, 0:00:00 | 4 days, 0:00:00 | 4 days, 0:00:00 |\n"
"| Time spent in bug | 1 day, 12:00:00 | 1 day, 12:00:00 | 1 day, 12:00:00 |\n"
"\n"
"| Metric | Count |\n"
"| --- | ---: |\n"
"| Number of items that remain open | 2 |\n"
"| Number of items closed | 1 |\n"
"| Total number of items created | 2 |\n\n"
"| Title | URL | Author | Time to first response | Time to close |"
" Time to answer | Time spent in bug |\n"
"| --- | --- | --- | --- | --- | --- | --- |\n"
"| Issue 1 | https://github.com/user/repo/issues/1 | alice | 1 day, 0:00:00 | "
"2 days, 0:00:00 | 3 days, 0:00:00 | 1 day, 0:00:00 |\n"
"| feat| Issue 2 | https://github.com/user/repo/issues/2 | bob | 3 days, 0:00:00 | "
"4 days, 0:00:00 | 5 days, 0:00:00 | 2 days, 0:00:00 |\n\n"
"_This report was generated with the [Issue Metrics Action](https://github.com/github/issue-metrics)_\n"
)
self.assertEqual(content, expected_content)
os.remove("issue_metrics.md")
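
To exercise just the new test class, the standard `unittest` loader can target it by dotted name (module and class names taken from this diff; the file must be importable from the working directory):

```python
import unittest

# Load and run only the new output-customization tests.
suite = unittest.defaultTestLoader.loadTestsFromName(
    "test_markdown_writer.TestWriteToMarkdownWithOutputCustomization"
)
unittest.TextTestRunner(verbosity=2).run(suite)
```
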
