feat: add JSON output #42

Merged · 5 commits · Jun 30, 2023
1 change: 1 addition & 0 deletions .gitignore
@@ -1,5 +1,6 @@
# Output files
issue_metrics.md
issue_metrics.json

# Byte-compiled / optimized / DLL files
__pycache__/
29 changes: 29 additions & 0 deletions README.md
@@ -238,6 +238,35 @@ Here is the output with all hidable columns hidden:

```

## Example using the JSON output instead of the markdown output

JSON output is available as well. You can use it in any number of ways, but here is one example that demonstrates retrieving the JSON output and then printing it out.

```yaml
name: Monthly issue metrics
on:
  workflow_dispatch:
  schedule:
    - cron: '3 2 1 * *'

jobs:
  build:
    name: issue metrics
    runs-on: ubuntu-latest

    steps:
      - name: Run issue-metrics tool
        id: issue-metrics
        uses: github/issue-metrics@v2
        env:
          GH_TOKEN: ${{ secrets.GH_TOKEN }}
          SEARCH_QUERY: 'repo:owner/repo is:issue created:2023-05-01..2023-05-31 -reason:"not planned"'

      - name: Print output of issue metrics tool
        run: echo "${{ steps.issue-metrics.outputs.metrics }}"

```
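
The metrics are also written to a local file, `issue_metrics.json` (see `json_writer.py` below). As a minimal sketch, assuming the structure documented in `write_to_json`, the output of a local run could be inspected with a few lines of Python:

```python
import json

# Load the file produced by write_to_json (issue_metrics.json in the working directory).
with open("issue_metrics.json", encoding="utf-8") as file:
    metrics = json.load(file)

print("Average time to first response:", metrics["average_time_to_first_response"])
print("Closed items:", metrics["num_items_closed"], "of", metrics["total_item_count"])
for issue in metrics["issues"]:
    # Durations are stringified timedeltas; missing values appear as the string "None".
    print(issue["title"], issue["time_to_close"])
```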

## Local usage without Docker

1. Copy `.env-example` to `.env`
21 changes: 15 additions & 6 deletions issue_metrics.py
@@ -29,15 +29,16 @@
import github3
from dotenv import load_dotenv

+from classes import IssueWithMetrics
from discussions import get_discussions
-from time_to_close import measure_time_to_close, get_average_time_to_close
+from json_writer import write_to_json
+from markdown_writer import write_to_markdown
+from time_to_answer import get_average_time_to_answer, measure_time_to_answer
+from time_to_close import get_average_time_to_close, measure_time_to_close
from time_to_first_response import (
-    measure_time_to_first_response,
    get_average_time_to_first_response,
+    measure_time_to_first_response,
)
-from time_to_answer import measure_time_to_answer, get_average_time_to_answer
-from markdown_writer import write_to_markdown
-from classes import IssueWithMetrics


def get_env_vars() -> tuple[str, str]:
@@ -274,7 +275,15 @@ def main():

    average_time_to_answer = get_average_time_to_answer(issues_with_metrics)

-    # Write the results to a markdown file
+    # Write the results to json and a markdown file
+    write_to_json(
+        issues_with_metrics,
+        average_time_to_first_response,
+        average_time_to_close,
+        average_time_to_answer,
+        num_issues_open,
+        num_issues_closed,
+    )
    write_to_markdown(
        issues_with_metrics,
        average_time_to_first_response,
105 changes: 105 additions & 0 deletions json_writer.py
@@ -0,0 +1,105 @@
"""A module for writing GitHub issue metrics to a json file.

Functions:
    write_to_json(
        issues_with_metrics: List[IssueWithMetrics],
        average_time_to_first_response: timedelta,
        average_time_to_close: timedelta,
        average_time_to_answer: timedelta,
        num_issues_opened: int,
        num_issues_closed: int,
    ) -> str:
        Write the issues with metrics to a json file.

"""


import json
from datetime import timedelta
import os
from typing import List, Union

from classes import IssueWithMetrics


def write_to_json(
    issues_with_metrics: Union[List[IssueWithMetrics], None],
    average_time_to_first_response: Union[timedelta, None],
    average_time_to_close: Union[timedelta, None],
    average_time_to_answer: Union[timedelta, None],
    num_issues_opened: Union[int, None],
    num_issues_closed: Union[int, None],
) -> str:
"""
Write the issues with metrics to a JSON file called issue_metrics.json.

json structure is like following
{
"average_time_to_first_response": "2 days, 12:00:00",
"average_time_to_close": "5 days, 0:00:00",
"average_time_to_answer": "1 day, 0:00:00",
"num_items_opened": 2,
"num_items_closed": 1,
"total_item_count": 2,
"issues": [
{
"title": "Issue 1",
"html_url": "https://github.com/owner/repo/issues/1",
"time_to_first_response": "3 days, 0:00:00",
"time_to_close": "6 days, 0:00:00",
"time_to_answer": "None",
},
{
"title": "Issue 2",
"html_url": "https://github.com/owner/repo/issues/2",
"time_to_first_response": "2 days, 0:00:00",
"time_to_close": "4 days, 0:00:00",
"time_to_answer": "1 day, 0:00:00",
},
],
}

"""

    # Ensure issues_with_metrics is not None
    if not issues_with_metrics:
        return ""

    # Create a dictionary with the metrics
    metrics = {
        "average_time_to_first_response": str(average_time_to_first_response),
        "average_time_to_close": str(average_time_to_close),
        "average_time_to_answer": str(average_time_to_answer),
        "num_items_opened": num_issues_opened,
        "num_items_closed": num_issues_closed,
        "total_item_count": len(issues_with_metrics),
    }

    # Create a list of dictionaries with the issues and metrics
    issues = []
    for issue in issues_with_metrics:
        issues.append(
            {
                "title": issue.title,
                "html_url": issue.html_url,
                "time_to_first_response": str(issue.time_to_first_response),
                "time_to_close": str(issue.time_to_close),
                "time_to_answer": str(issue.time_to_answer),
            }
        )

    # Add the issues to the metrics dictionary
    metrics["issues"] = issues

    # add output to github action output
    # pylint: disable=unspecified-encoding
    metrics_json = json.dumps(metrics)
    if os.environ.get("GITHUB_OUTPUT"):
        with open(os.environ["GITHUB_OUTPUT"], "a") as file_handle:
            print(f"metrics={metrics_json}", file=file_handle)

    # Write the metrics to a JSON file
    with open("issue_metrics.json", "w", encoding="utf-8") as file:
        json.dump(metrics, file, indent=4)

    return metrics_json
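
Note that `write_to_json` serializes durations with `str(timedelta)` (for example `"2 days, 12:00:00"`) and missing values as the string `"None"`, so a consumer that needs `timedelta` objects back has to parse those strings. Here is a minimal sketch of such a helper, assuming that format; `parse_timedelta` is illustrative and not part of this PR:

```python
from datetime import timedelta
from typing import Union


def parse_timedelta(value: Union[str, None]) -> Union[timedelta, None]:
    """Parse a str(timedelta) string such as '2 days, 12:00:00' back into a timedelta.

    Illustrative helper (not part of this module); assumes the format produced by
    write_to_json, where missing values are serialized as the string "None".
    """
    if value is None or value == "None":
        return None
    days = 0
    time_part = value
    if "day" in value:
        day_part, time_part = value.split(", ")
        days = int(day_part.split()[0])
    hours, minutes, seconds = time_part.split(":")
    return timedelta(days=days, hours=int(hours), minutes=int(minutes), seconds=float(seconds))


# Example usage against values shaped like the ones in issue_metrics.json:
print(parse_timedelta("2 days, 12:00:00"))  # 2 days, 12:00:00
print(parse_timedelta("None"))              # None
```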
77 changes: 77 additions & 0 deletions test_json_writer.py
@@ -0,0 +1,77 @@
"""Tests for the write_to_json function in json_writer.py."""

import json
import unittest
from datetime import timedelta
from classes import IssueWithMetrics
from json_writer import write_to_json


class TestWriteToJson(unittest.TestCase):
"""Tests for the write_to_json function."""

def test_write_to_json(self):
"""Test that write_to_json writes the correct JSON file."""
issues_with_metrics = [
IssueWithMetrics(
title="Issue 1",
html_url="https://github.com/owner/repo/issues/1",
time_to_first_response=timedelta(days=3),
time_to_close=timedelta(days=6),
time_to_answer=None,
),
IssueWithMetrics(
title="Issue 2",
html_url="https://github.com/owner/repo/issues/2",
time_to_first_response=timedelta(days=2),
time_to_close=timedelta(days=4),
time_to_answer=timedelta(days=1),
),
]
average_time_to_first_response = timedelta(days=2.5)
average_time_to_close = timedelta(days=5)
average_time_to_answer = timedelta(days=1)
num_issues_opened = 2
num_issues_closed = 1

expected_output = {
"average_time_to_first_response": "2 days, 12:00:00",
"average_time_to_close": "5 days, 0:00:00",
"average_time_to_answer": "1 day, 0:00:00",
"num_items_opened": 2,
"num_items_closed": 1,
"total_item_count": 2,
"issues": [
{
"title": "Issue 1",
"html_url": "https://github.com/owner/repo/issues/1",
"time_to_first_response": "3 days, 0:00:00",
"time_to_close": "6 days, 0:00:00",
"time_to_answer": "None",
},
{
"title": "Issue 2",
"html_url": "https://github.com/owner/repo/issues/2",
"time_to_first_response": "2 days, 0:00:00",
"time_to_close": "4 days, 0:00:00",
"time_to_answer": "1 day, 0:00:00",
},
],
}

# Call the function and check the output
self.assertEqual(
write_to_json(
issues_with_metrics,
average_time_to_first_response,
average_time_to_close,
average_time_to_answer,
num_issues_opened,
num_issues_closed,
),
json.dumps(expected_output),
)


if __name__ == "__main__":
unittest.main()
2 changes: 2 additions & 0 deletions test_markdown_writer.py
@@ -2,6 +2,8 @@

Classes:
    TestWriteToMarkdown: A class to test the write_to_markdown function with mock data.
    TestWriteToMarkdownWithEnv: A class to test the write_to_markdown function with
        environment variables set.

"""
import os