Output customizations: REPORT_TITLE and OUTPUT_FILE #160

Closed · wants to merge 5 commits

Changes from 2 commits
1 change: 1 addition & 0 deletions README.md
@@ -111,6 +111,7 @@ Below are the allowed configuration options:
| `GH_TOKEN` | True | | The GitHub Token used to scan the repository. Must have read access to all repositories you are interested in scanning. |
| `SEARCH_QUERY` | True | | The query used to filter issues/PRs; it must contain a `repo:`, `org:`, `owner:`, or `user:` entry. For discussions, include `type:discussions` in the query. |
| `LABELS_TO_MEASURE` | False | | A comma-separated list of labels for which to measure how long each label is applied. If not provided, no label durations will be measured. Not compatible with discussions at this time. |
| `REPORT_TITLE` | False | "Issue Metrics" | A custom title that will be printed at the very top of the report. _Note:_ Most useful when combining multiple reports into a single GitHub issue. |
| `HIDE_AUTHOR` | False | | If set to any value, the author will not be displayed in the generated markdown file. |
| `HIDE_TIME_TO_FIRST_RESPONSE` | False | | If set to any value, the time to first response will not be displayed in the generated markdown file. |
| `HIDE_TIME_TO_CLOSE` | False | | If set to any value, the time to close will not be displayed in the generated markdown file. |
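The note on `REPORT_TITLE` above points at combining multiple reports into a single GitHub issue. As a rough, hedged illustration (the filenames below are hypothetical and not part of this PR), distinct titles keep a concatenated report readable:

```python
from pathlib import Path

# Hypothetical post-processing step: merge two reports that were generated by
# separate runs of the action, each with its own REPORT_TITLE, into one body
# that can be posted as a single GitHub issue.
reports = [Path("issues_report.md"), Path("prs_report.md")]  # assumed names
combined = "\n\n---\n\n".join(
    p.read_text(encoding="utf-8") for p in reports if p.exists()
)
Path("combined_report.md").write_text(combined, encoding="utf-8")
```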
9 changes: 8 additions & 1 deletion issue_metrics.py
@@ -52,6 +52,7 @@ def get_env_vars() -> tuple[str, str, List[str]]:
str: the search query used to filter issues, prs, and discussions
str: the github token used to authenticate to github.com
List[str]: a list of users to ignore when calculating metrics
str: the title to use at the very top of the markdown output
"""
search_query = os.getenv("SEARCH_QUERY")
if not search_query:
@@ -67,7 +68,11 @@ def get_env_vars() -> tuple[str, str, List[str]]:
else:
ignore_users = []

return search_query, token, ignore_users
report_title = os.getenv("REPORT_TITLE")
if not report_title:
report_title = "Issue Metrics"

return search_query, token, ignore_users, report_title


def search_issues(
@@ -272,6 +277,7 @@ def main():
search_query = env_vars[0]
token = env_vars[1]
ignore_users = env_vars[2]
report_title = env_vars[3]

# Get the repository owner and name from the search query
owner = get_owner(search_query)
@@ -351,6 +357,7 @@
num_issues_closed,
labels,
search_query,
report_title,
)


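For orientation, here is a minimal sketch of the data flow this diff introduces: `get_env_vars()` now also resolves the report title, with the "Issue Metrics" fallback shown above, and `main()` threads it through as a fourth return value to `write_to_markdown()`. The sketch is simplified and self-contained; the ignore-users variable name is an assumption, and validation and error handling are omitted.

```python
import os
from typing import List, Tuple


def get_env_vars_sketch() -> Tuple[str, str, List[str], str]:
    """Stripped-down restatement of get_env_vars() after this change."""
    search_query = os.getenv("SEARCH_QUERY", "")
    token = os.getenv("GH_TOKEN", "")
    # Assumed variable name for the ignore list; shown here only for shape.
    ignore_users = [u for u in os.getenv("IGNORE_USERS", "").split(",") if u]
    # New in this PR: resolve the report title with a default fallback.
    report_title = os.getenv("REPORT_TITLE") or "Issue Metrics"
    return search_query, token, ignore_users, report_title


# Caller side, mirroring main(): four values now come back instead of three,
# and report_title is eventually handed to write_to_markdown().
search_query, token, ignore_users, report_title = get_env_vars_sketch()
```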
8 changes: 5 additions & 3 deletions markdown_writer.py
@@ -78,6 +78,7 @@ def write_to_markdown(
num_issues_closed: Union[int, None],
labels=None,
search_query=None,
report_title=None,
hide_label_metrics=False,
) -> None:
"""Write the issues with metrics to a markdown file.
@@ -91,19 +92,20 @@
average_time_in_labels (dict): A dictionary containing the average time spent in each label.
file (file object, optional): The file object to write to. If not provided,
a file named "issue_metrics.md" will be created.
num_issues_opened (int): The Number of items that remain opened.
num_issues_opened (int): The number of items that remain opened.
num_issues_closed (int): The number of issues that were closed.
labels (List[str]): A list of the labels that are used in the issues.
search_query (str): The search query used to find the issues.
hide_label_metrics (bool): Represents whether the user has chosen to hide label metrics in the output
report_title (str): The title to use at the top of the markdown output.
hide_label_metrics (bool): Represents whether the user has chosen to hide label metrics in the output.

Returns:
None.

"""
columns = get_non_hidden_columns(labels)
with open("issue_metrics.md", "w", encoding="utf-8") as file:
file.write("# Issue Metrics\n\n")
file.write(f"# {report_title}\n\n")

# If all the metrics are None, then there are no issues
if not issues_with_metrics or len(issues_with_metrics) == 0:
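One detail worth noting at this commit: `report_title` defaults to `None` in the new signature, so a caller that omits it would render a literal `# None` heading. The review thread further down notes that a later commit moves the default into the `write_to_markdown()` signature itself. A minimal illustration of the heading behaviour (the helper name below is invented for this sketch):

```python
def render_heading(report_title=None) -> str:
    # Mirrors the single changed line in markdown_writer.py: the H1 now comes
    # from report_title instead of the hard-coded "Issue Metrics".
    return f"# {report_title}\n\n"


assert render_heading("Issue Metrics") == "# Issue Metrics\n\n"
assert render_heading("My Custom Report Title") == "# My Custom Report Title\n\n"
assert render_heading() == "# None\n\n"  # why a default in the signature helps
```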
125 changes: 124 additions & 1 deletion test_markdown_writer.py
@@ -85,6 +85,7 @@ def test_write_to_markdown(self):
num_issues_closed=num_issues_closed,
labels=["bug"],
search_query="is:issue is:open label:bug",
report_title="Issue Metrics",
)

# Check that the function writes the correct markdown file
@@ -181,6 +182,7 @@ def test_write_to_markdown_with_vertical_bar_in_title(self):
num_issues_opened=num_issues_opened,
num_issues_closed=num_issues_closed,
labels=["bug"],
report_title="Issue Metrics",
)

# Check that the function writes the correct markdown file
@@ -216,7 +218,17 @@ def test_write_to_markdown_no_issues(self):
"""Test that write_to_markdown writes the correct markdown file when no issues are found."""
# Call the function with no issues
with patch("builtins.open", mock_open()) as mock_open_file:
write_to_markdown(None, None, None, None, None, None, None)
# write_to_markdown(None, None, None, None, None, None, None)
write_to_markdown(
issues_with_metrics=None,
average_time_to_first_response=None,
average_time_to_close=None,
average_time_to_answer=None,
average_time_in_labels=None,
num_issues_opened=None,
num_issues_closed=None,
report_title="Issue Metrics",
)

# Check that the file was written correctly
expected_output = [
@@ -303,6 +315,7 @@ def test_writes_markdown_file_with_non_hidden_columns_only(self):
labels=["label1"],
search_query="repo:user/repo is:issue",
hide_label_metrics=True,
report_title="Issue Metrics",
)

# Check that the function writes the correct markdown file
@@ -324,3 +337,113 @@
)
self.assertEqual(content, expected_content)
os.remove("issue_metrics.md")


class TestWriteToMarkdownWithOutputCustomization(unittest.TestCase):
"""Test the write_to_markdown function with the REPORT_TITLE environment variables set."""

def setUp(self):
os.environ["REPORT_TITLE"] = "My Custom Report Title"
# os.environ["HIDE_TIME_TO_CLOSE"] = "True"
# os.environ["HIDE_TIME_TO_ANSWER"] = "True"
# os.environ["HIDE_LABEL_METRICS"] = "True"

def tearDown(self):
# Unset the environment variables
os.environ.pop("REPORT_TITLE")
# os.environ.pop("HIDE_TIME_TO_CLOSE")
# os.environ.pop("HIDE_TIME_TO_ANSWER")
# os.environ.pop("HIDE_LABEL_METRICS")

def test_writes_markdown_file_with_output_customization(self):
"""
Test that write_to_markdown writes the correct
markdown file.
"""

# Create mock data
issues_with_metrics = [
IssueWithMetrics(
"Issue 1",
"https://github.com/user/repo/issues/1",
"alice",
timedelta(days=1),
timedelta(days=2),
timedelta(days=3),
{"bug": timedelta(days=1)},
),
IssueWithMetrics(
"feat| Issue 2", # title contains a vertical bar
"https://github.com/user/repo/issues/2",
"bob",
timedelta(days=3),
timedelta(days=4),
timedelta(days=5),
{"bug": timedelta(days=2)},
),
]
average_time_to_first_response = {
"avg": timedelta(days=2),
"med": timedelta(days=2),
"90p": timedelta(days=2),
}
average_time_to_close = {
"avg": timedelta(days=3),
"med": timedelta(days=3),
"90p": timedelta(days=3),
}
average_time_to_answer = {
"avg": timedelta(days=4),
"med": timedelta(days=4),
"90p": timedelta(days=4),
}
average_time_in_labels = {
"avg": {"bug": "1 day, 12:00:00"},
"med": {"bug": "1 day, 12:00:00"},
"90p": {"bug": "1 day, 12:00:00"},
}

num_issues_opened = 2
num_issues_closed = 1

# Call the function
write_to_markdown(
issues_with_metrics=issues_with_metrics,
average_time_to_first_response=average_time_to_first_response,
average_time_to_close=average_time_to_close,
average_time_to_answer=average_time_to_answer,
average_time_in_labels=average_time_in_labels,
num_issues_opened=num_issues_opened,
num_issues_closed=num_issues_closed,
labels=["bug"],
report_title="My Custom Report Title",

[Inline review thread]

spier (Contributor Author):

@zkoppert I applied the suggested changes. That means moving the default value for the report title into the write_to_markdown() signature. It does lead to a better code structure overall, I think.

However, one issue remains in this area: we feed the custom title in as a parameter here, so we are not actually testing whether it is passed through correctly from the REPORT_TITLE env variable.

By the way, the same issue exists in the testing logic for hide_label_metrics in row 305 above; I am not sure that test really covers what it should either.

spier (Contributor Author):

I think overall I am a bit unclear about the intended logic for using ENV vars in the code.

Some ENV vars are read in various functions in issue_metrics.py, via main(), get_env_vars(), and others. Still other ENV vars are read directly in markdown_writer.py.

Member:

Ideally we would consolidate all env variables being read from the get_env_vars() function.

Member:

For this PR it would be good to follow that pattern. We can open a separate PR to get everything using the proper get_env_vars() function.

)

# Check that the function writes the correct markdown file
with open("issue_metrics.md", "r", encoding="utf-8") as file:
content = file.read()
expected_content = (
"# My Custom Report Title\n\n"
"| Metric | Average | Median | 90th percentile |\n"
"| --- | --- | --- | ---: |\n"
"| Time to first response | 2 days, 0:00:00 | 2 days, 0:00:00 | 2 days, 0:00:00 |\n"
"| Time to close | 3 days, 0:00:00 | 3 days, 0:00:00 | 3 days, 0:00:00 |\n"
"| Time to answer | 4 days, 0:00:00 | 4 days, 0:00:00 | 4 days, 0:00:00 |\n"
"| Time spent in bug | 1 day, 12:00:00 | 1 day, 12:00:00 | 1 day, 12:00:00 |\n"
"\n"
"| Metric | Count |\n"
"| --- | ---: |\n"
"| Number of items that remain open | 2 |\n"
"| Number of items closed | 1 |\n"
"| Total number of items created | 2 |\n\n"
"| Title | URL | Author | Time to first response | Time to close |"
" Time to answer | Time spent in bug |\n"
"| --- | --- | --- | --- | --- | --- | --- |\n"
"| Issue 1 | https://github.com/user/repo/issues/1 | alice | 1 day, 0:00:00 | "
"2 days, 0:00:00 | 3 days, 0:00:00 | 1 day, 0:00:00 |\n"
"| feat| Issue 2 | https://github.com/user/repo/issues/2 | bob | 3 days, 0:00:00 | "
"4 days, 0:00:00 | 5 days, 0:00:00 | 2 days, 0:00:00 |\n\n"
"_This report was generated with the [Issue Metrics Action](https://github.com/github/issue-metrics)_\n"
)
self.assertEqual(content, expected_content)
os.remove("issue_metrics.md")
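
Picking up the concern from the review thread above (the custom title is passed as a parameter, so the tests never exercise the `REPORT_TITLE` environment variable itself), here is a hedged sketch of what an env-var-level check might look like. It assumes `get_env_vars` is importable from `issue_metrics` and that `SEARCH_QUERY` and `GH_TOKEN` are the only other variables it requires:

```python
import os
import unittest
from unittest.mock import patch

from issue_metrics import get_env_vars  # assumed import path


class TestReportTitleEnvVar(unittest.TestCase):
    """Sketch: verify REPORT_TITLE is actually read from the environment."""

    @patch.dict(
        os.environ,
        {
            "SEARCH_QUERY": "repo:user/repo is:issue",
            "GH_TOKEN": "test-token",
            "REPORT_TITLE": "My Custom Report Title",
        },
        clear=True,
    )
    def test_report_title_is_read_from_environment(self):
        _query, _token, _ignore_users, report_title = get_env_vars()
        self.assertEqual(report_title, "My Custom Report Title")

    @patch.dict(
        os.environ,
        {"SEARCH_QUERY": "repo:user/repo is:issue", "GH_TOKEN": "test-token"},
        clear=True,
    )
    def test_report_title_falls_back_to_default(self):
        *_rest, report_title = get_env_vars()
        self.assertEqual(report_title, "Issue Metrics")


if __name__ == "__main__":
    unittest.main()
```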