Merge pull request #5 from github/cleanup
clean up and formatting
zkoppert authored Jun 2, 2023
2 parents 2a06eaa + 8295015 commit 71ba920
Showing 9 changed files with 299 additions and 173 deletions.
2 changes: 2 additions & 0 deletions .coveragerc
@@ -0,0 +1,2 @@
[run]
omit = test*.py
1 change: 1 addition & 0 deletions .github/dependabot.yml
@@ -1,3 +1,4 @@
---
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
1 change: 1 addition & 0 deletions .github/release-drafter.yml
@@ -1,3 +1,4 @@
---
name-template: 'v$RESOLVED_VERSION'
tag-template: 'v$RESOLVED_VERSION'
template: |
1 change: 1 addition & 0 deletions .github/workflows/release-drafter.yml
@@ -1,3 +1,4 @@
---
name: Release Drafter

on:
4 changes: 4 additions & 0 deletions .gitignore
@@ -1,4 +1,5 @@
# Output files
issue_metrics.md

# Byte-compiled / optimized / DLL files
__pycache__/
@@ -138,3 +139,6 @@ dmypy.json

# Cython debug symbols
cython_debug/

# Mac
.DS_Store
7 changes: 6 additions & 1 deletion Makefile
@@ -1,2 +1,7 @@
.PHONY: test
test:
pytest -v --cov=. --cov-fail-under=80
pytest -v --cov=. --cov-config=.coveragerc --cov-fail-under=80 --cov-report term-missing

.PHONY: clean
clean:
rm -rf .pytest_cache .coverage __pycache__
16 changes: 14 additions & 2 deletions README.md
@@ -57,13 +57,25 @@ jobs:

### Example stale_repos.md output

TODO
```markdown
# Issue Metrics

Average time to first response: 2 days, 3:30:00
Number of issues: 2

| Title | URL | TTFR |
| --- | --- | ---: |
| Issue 2 | https://github.com/user/repo/issues/2 | 3 days, 4:30:00 |
| Issue 1 | https://github.com/user/repo/issues/1 | 1 day, 2:30:00 |

```
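For reference, the average shown in this example follows directly from the two per-issue times; a quick standard-library check (an illustration, not part of this commit):

```python
# Quick sanity check of the README example above (not part of this commit):
# averaging the two example times to first response reproduces the headline figure.
from datetime import timedelta

ttfrs = [
    timedelta(days=1, hours=2, minutes=30),  # Issue 1
    timedelta(days=3, hours=4, minutes=30),  # Issue 2
]
average = timedelta(seconds=sum(t.total_seconds() for t in ttfrs) / len(ttfrs))
print(average)  # 2 days, 3:30:00
```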

## Local usage without Docker

1. Copy `.env-example` to `.env`
1. Fill out the `.env` file with a _token_ from a user that has access to the organization to scan (listed below). Tokens should have admin:org or read:org access.
TODO: Make sure this is accurate
1. Fill out the `.env` file with the _repository_url_ of the repository to scan
1. Fill out the `.env` file with the _search_query_ to filter issues by
1. `pip install -r requirements.txt`
1. Run `python3 ./issue_metrics.py`, which will output issue metrics data

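The variable names `ISSUE_SEARCH_QUERY` and `REPOSITORY_URL` come from the updated script; the token variable name `GH_TOKEN` is an assumption, since the token step is still marked TODO above. A minimal pre-flight check might look like:

```python
# Minimal pre-flight sketch (not part of this commit): confirm the environment
# variables main() reads are present before running issue_metrics.py.
# GH_TOKEN is an assumed name for the token variable; the other two appear in the diff.
import os

required = ("GH_TOKEN", "ISSUE_SEARCH_QUERY", "REPOSITORY_URL")
missing = [name for name in required if not os.getenv(name)]
if missing:
    raise SystemExit(f"Missing environment variables: {', '.join(missing)}")
print("Environment looks complete; now run: python3 ./issue_metrics.py")
```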
104 changes: 65 additions & 39 deletions issue_metrics.py
@@ -1,5 +1,22 @@
"""A script for measuring time to first response for GitHub issues.
This script uses the GitHub API to search for issues in a repository and measure
the time to first response for each issue. It then calculates the average time
to first response and writes the issues with their time to first response to a
markdown file.
Functions:
search_issues: Search for issues in a GitHub repository.
auth_to_github: Authenticate to the GitHub API.
measure_time_to_first_response: Measure the time to first response for a GitHub issue.
get_average_time_to_first_response: Calculate the average time to first response for
a list of issues.
write_to_markdown: Write the issues with metrics to a markdown file.
"""

import os
from datetime import datetime
from datetime import datetime, timedelta
from os.path import dirname, join
from urllib.parse import urlparse

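The new module docstring lists the script's functions; a minimal sketch (an illustration, not code from this commit) of how they are meant to chain together, mirroring `main()` further down the diff. The repository URL and query below are placeholders, and calling `auth_to_github()` with no arguments is an assumption:

```python
# Illustrative flow only; mirrors the functions named in the docstring above.
github_connection = auth_to_github()  # assumed to take no arguments here
issues = search_issues(
    "https://github.com/user/repo",  # placeholder repository URL
    "is:issue is:open",               # placeholder search query
    github_connection,
)
issues_with_metrics = measure_time_to_first_response(issues)
average = get_average_time_to_first_response(issues_with_metrics)
write_to_markdown(issues_with_metrics, average)
```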
@@ -13,24 +30,25 @@ def search_issues(repository_url, issue_search_query, github_connection):
Args:
repository_url (str): The URL of the repository to search in.
ie https://github.com/user/repo
issue_search_query (str): The search query to use for finding issues.
github_connection (github3.GitHub): A connection to the GitHub API.
Returns:
List[github3.issues.Issue]: A list of issues that match the search query.
"""
print("Searching for issues...")
# Parse the repository owner and name from the URL
parsed_url = urlparse(repository_url)
path = parsed_url.path.strip("/")

print(f"parsing URL: {repository_url}")
# Split the path into owner and repo
owner, repo = path.split("/")

# Get the repository object
repo = github_connection.repository(owner, repo) # type: ignore
print(f"owner: {owner}, repo: {repo}")

# Search for issues that match the query
issues = repo.search_issues(issue_search_query) # type: ignore
full_query = f"repo:{owner}/{repo} {issue_search_query}"
issues = github_connection.search_issues(full_query) # type: ignore

# Print the issue titles
for issue in issues:
@@ -59,8 +77,8 @@ def measure_time_to_first_response(issues):
issues (list of github3.Issue): A list of GitHub issues.
Returns:
list of github3.Issue: A list of GitHub issues with the time to first response
added as an attribute.
list of tuple: A list of tuples containing a GitHub issue
title, url, and its time to first response.
Raises:
TypeError: If the input is not a list of GitHub issues.
@@ -69,22 +87,31 @@ def measure_time_to_first_response(issues):
issues_with_metrics = []
for issue in issues:
# Get the first comment
first_comment = issue.comments()[0] # type: ignore

# Get the created_at time for the first comment
first_comment_time = datetime.fromisoformat(first_comment.created_at) # type: ignore

# Get the created_at time for the issue
issue_time = datetime.fromisoformat(issue.created_at) # type: ignore

# Calculate the time between the issue and the first comment
time_to_first_response = first_comment_time - issue_time

# Add the time to the issue
issue.time_to_first_response = time_to_first_response
if issue.comments <= 0:
first_comment_time = None
time_to_first_response = None
else:
comments = issue.issue.comments(
number=1, sort="created", direction="asc"
) # type: ignore
for comment in comments:
# Get the created_at time for the first comment
first_comment_time = comment.created_at # type: ignore

# Get the created_at time for the issue
issue_time = datetime.fromisoformat(issue.created_at) # type: ignore

# Calculate the time between the issue and the first comment
time_to_first_response = first_comment_time - issue_time # type: ignore

# Add the issue to the list of issues with metrics
issues_with_metrics.append(issue)
issues_with_metrics.append(
[
issue.title,
issue.html_url,
time_to_first_response,
]
)

return issues_with_metrics

@@ -97,21 +124,26 @@ def get_average_time_to_first_response(issues):
first response added as an attribute.
Returns:
datetime.timedelta: The average time to first response for the issues.
datetime.timedelta: The average time to first response for the issues in seconds.
Raises:
TypeError: If the input is not a list of GitHub issues.
"""
total_time_to_first_response = 0
for issue in issues:
total_time_to_first_response += issue.time_to_first_response.total_seconds()
total_time_to_first_response += issue[2].total_seconds()

average_time_to_first_response = total_time_to_first_response / len(
average_seconds_to_first_response = total_time_to_first_response / len(
issues
) # type: ignore

return average_time_to_first_response
# Print the average time to first response converting seconds to a readable time format
print(
f"Average time to first response: {timedelta(seconds=average_seconds_to_first_response)}"
)

return timedelta(seconds=average_seconds_to_first_response)


def write_to_markdown(issues_with_metrics, average_time_to_first_response, file=None):
@@ -136,10 +168,10 @@ def write_to_markdown(issues_with_metrics, average_time_to_first_response, file=
f"Average time to first response: {average_time_to_first_response}\n"
)
file.write(f"Number of issues: {len(issues_with_metrics)}\n\n")
file.write("| Issue | TTFR |\n")
file.write("| --- | ---: |\n")
for issue, ttfr in issues_with_metrics:
file.write(f"| {issue} | {ttfr} |\n")
file.write("| Title | URL | TTFR |\n")
file.write("| --- | --- | ---: |\n")
for title, url, ttfr in issues_with_metrics:
file.write(f"| {title} | {url} | {ttfr} |\n")
print("Wrote issue metrics to issue_metrics.md")


@@ -170,25 +202,19 @@ def main():
if not issue_search_query:
raise ValueError("ISSUE_SEARCH_QUERY environment variable not set")

issue_search_query = os.getenv("REPOSITORY_URL")
if not issue_search_query:
repo_url = os.getenv("REPOSITORY_URL")
if not repo_url:
raise ValueError("REPOSITORY_URL environment variable not set")

# Search for issues
issues = search_issues(issue_search_query, issue_search_query, github_connection)

# Print the number of issues found
print(f"Found {len(issues)} issues")
issues = search_issues(repo_url, issue_search_query, github_connection)

# Find the time to first response
issues_with_ttfr = measure_time_to_first_response(issues)
average_time_to_first_response = get_average_time_to_first_response(
issues_with_ttfr
)

# Print the average time to first response
print(f"Average time to first response: {average_time_to_first_response}")

# Write the results to a markdown file
write_to_markdown(issues_with_ttfr, average_time_to_first_response)

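The main behavioral change in `search_issues` is that the query is now scoped with a `repo:owner/name` qualifier and run through the connection's search API rather than through a repository object. A standalone sketch of that path, assuming github3.py and a personal access token in `GH_TOKEN` (the variable name is an assumption):

```python
# Standalone sketch of the new search path (illustration, not part of this commit).
import os
import github3

github_connection = github3.login(token=os.getenv("GH_TOKEN"))  # token variable name assumed
owner, repo = "user", "repo"             # parsed from REPOSITORY_URL in the real script
issue_search_query = "is:issue is:open"  # placeholder query

full_query = f"repo:{owner}/{repo} {issue_search_query}"
for result in github_connection.search_issues(full_query):
    print(result.title)  # search results expose title/html_url, as used in the diff
```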