Merge pull request #415 from github/draft-tracking
zkoppert authored Oct 24, 2024
2 parents aab6341 + fb362c0 commit 4888b60
Showing 13 changed files with 508 additions and 187 deletions.
2 changes: 2 additions & 0 deletions README.md
@@ -20,6 +20,7 @@ Feel free to inquire about its usage by creating an issue in this repository.
| Time to Close | The period from creation to closure.\* |
| Time to Answer (Discussions Only) | The time from creation to an answer. |
| Time in Label | The duration from label application to removal, requires `LABELS_TO_MEASURE` env variable. |
| Time in Draft (PRs Only) | The duration from creation to the PR being marked as ready for review. |

\*For pull requests, these metrics exclude the time the PR was in draft mode.

@@ -151,6 +152,7 @@ This action can be configured to authenticate with GitHub App Installation or Pe
| `HIDE_TIME_TO_ANSWER` | False | False | If set to `true`, the time to answer a discussion will not be displayed in the generated Markdown file. |
| `HIDE_TIME_TO_CLOSE` | False | False | If set to `true`, the time to close will not be displayed in the generated Markdown file. |
| `HIDE_TIME_TO_FIRST_RESPONSE` | False | False | If set to `true`, the time to first response will not be displayed in the generated Markdown file. |
| `DRAFT_PR_TRACKING` | False | False | If set to `true`, draft PRs will be included in the metrics as a new column and in the summary stats. |
| `IGNORE_USERS` | False | False | A comma separated list of users to ignore when calculating metrics. (ie. `IGNORE_USERS: 'user1,user2'`). To ignore bots, append `[bot]` to the user (ie. `IGNORE_USERS: 'github-actions[bot]'`) Users in this list will also have their authored issues and pull requests removed from the Markdown table. |
| `ENABLE_MENTOR_COUNT` | False | False | If set to `true`, count the number of comments users left on discussions, issues, and PRs and display the number of active mentors. |
| `MIN_MENTOR_COMMENTS` | False | 10 | Minimum number of comments to count as a mentor |
3 changes: 3 additions & 0 deletions classes.py
@@ -18,6 +18,7 @@ class IssueWithMetrics:
time_to_close (timedelta, optional): The time it took to close the issue.
time_to_answer (timedelta, optional): The time it took to answer the
discussions in the issue.
time_in_draft (timedelta, optional): The time the PR was in draft state.
label_metrics (dict, optional): A dictionary containing the label metrics
mentor_activity (dict, optional): A dictionary containing active mentors
@@ -33,6 +34,7 @@ def __init__(
time_to_first_response=None,
time_to_close=None,
time_to_answer=None,
time_in_draft=None,
labels_metrics=None,
mentor_activity=None,
):
@@ -42,5 +44,6 @@ def __init__(
self.time_to_first_response = time_to_first_response
self.time_to_close = time_to_close
self.time_to_answer = time_to_answer
self.time_in_draft = time_in_draft
self.label_metrics = labels_metrics
self.mentor_activity = mentor_activity
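
For illustration, here is a minimal sketch of constructing an `IssueWithMetrics` object with the new `time_in_draft` field. It uses only the keyword arguments visible in the diff above; the specific values are hypothetical.

```python
from datetime import timedelta

from classes import IssueWithMetrics

# Hypothetical PR that sat in draft for 1 day and 6 hours before being marked
# ready for review.
pr_metrics = IssueWithMetrics(
    title="Add draft tracking",
    html_url="https://github.com/owner/repo/pull/415",
    author="octocat",
    time_to_first_response=timedelta(hours=30),
    time_in_draft=timedelta(days=1, hours=6),
)

print(pr_metrics.time_in_draft)  # 1 day, 6:00:00
```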
31 changes: 23 additions & 8 deletions config.py
@@ -1,6 +1,7 @@
"""A module for managing environment variables used in GitHub metrics calculation.
This module defines a class for encapsulating environment variables and a function to retrieve these variables.
This module defines a class for encapsulating environment variables
and a function to retrieve these variables.
Classes:
EnvVars: Represents the collection of environment variables used in the script.
@@ -23,27 +24,36 @@ class EnvVars:
Attributes:
gh_app_id (int | None): The GitHub App ID to use for authentication
gh_app_installation_id (int | None): The GitHub App Installation ID to use for authentication
gh_app_private_key_bytes (bytes): The GitHub App Private Key as bytes to use for authentication
gh_app_installation_id (int | None): The GitHub App Installation ID to use for
authentication
gh_app_private_key_bytes (bytes): The GitHub App Private Key as bytes to use for
authentication
gh_token (str | None): GitHub personal access token (PAT) for API authentication
ghe (str): The GitHub Enterprise URL to use for authentication
hide_author (bool): If true, the author's information is hidden in the output
hide_items_closed_count (bool): If true, the number of items closed metric is hidden in the output
hide_items_closed_count (bool): If true, the number of items closed metric is hidden
in the output
hide_label_metrics (bool): If true, the label metrics are hidden in the output
hide_time_to_answer (bool): If true, the time to answer discussions is hidden in the output
hide_time_to_close (bool): If true, the time to close metric is hidden in the output
hide_time_to_first_response (bool): If true, the time to first response metric is hidden in the output
hide_time_to_first_response (bool): If true, the time to first response metric is hidden
in the output
ignore_users (List[str]): List of usernames to ignore when calculating metrics
labels_to_measure (List[str]): List of labels to measure how much time the label is applied
enable_mentor_count (bool): If set to TRUE, compute number of mentors
min_mentor_comments (str): If set, defines the minimum number of comments for mentors
max_comments_eval (str): If set, defines the maximum number of comments to look at for mentor evaluation
heavily_involved_cutoff (str): If set, defines the cutoff after which heavily involved commentors in
max_comments_eval (str): If set, defines the maximum number of comments to look
at for mentor evaluation
heavily_involved_cutoff (str): If set, defines the cutoff after which heavily
involved commentors in
search_query (str): Search query used to filter issues/prs/discussions on GitHub
non_mentioning_links (bool): If set to TRUE, links do not cause a notification in the destination repository
non_mentioning_links (bool): If set to TRUE, links do not cause a notification
in the destination repository
report_title (str): The title of the report
output_file (str): The name of the file to write the report to
rate_limit_bypass (bool): If set to TRUE, bypass the rate limit for the GitHub API
draft_pr_tracking (bool): If set to TRUE, track PR time in draft state
in addition to other metrics
"""

def __init__(
@@ -70,6 +80,7 @@ def __init__(
report_title: str,
output_file: str,
rate_limit_bypass: bool = False,
draft_pr_tracking: bool = False,
):
self.gh_app_id = gh_app_id
self.gh_app_installation_id = gh_app_installation_id
@@ -93,6 +104,7 @@ def __init__(
self.report_title = report_title
self.output_file = output_file
self.rate_limit_bypass = rate_limit_bypass
self.draft_pr_tracking = draft_pr_tracking

def __repr__(self):
return (
@@ -119,6 +131,7 @@ def __repr__(self):
f"{self.report_title}"
f"{self.output_file}"
f"{self.rate_limit_bypass}"
f"{self.draft_pr_tracking}"
)


@@ -203,6 +216,7 @@ def get_env_vars(test: bool = False) -> EnvVars:
report_title = os.getenv("REPORT_TITLE", "Issue Metrics")
output_file = os.getenv("OUTPUT_FILE", "")
rate_limit_bypass = get_bool_env_var("RATE_LIMIT_BYPASS", False)
draft_pr_tracking = get_bool_env_var("DRAFT_PR_TRACKING", False)

# Hidden columns
hide_author = get_bool_env_var("HIDE_AUTHOR", False)
@@ -240,4 +254,5 @@ def get_env_vars(test: bool = False) -> EnvVars:
report_title,
output_file,
rate_limit_bypass,
draft_pr_tracking,
)
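
As a rough sketch of how the new flag flows from the environment into `EnvVars`: `get_bool_env_var` is called with a variable name and a default, so something along the following lines would parse it. The helper's real implementation is not part of the loaded diff, so this version is an assumption.

```python
import os


def get_bool_env_var(env_var_name: str, default: bool = False) -> bool:
    """Hypothetical helper: treat 'true', '1', or 'yes' (any case) as True."""
    value = os.environ.get(env_var_name, str(default))
    return value.strip().lower() in ("true", "1", "yes")


# With DRAFT_PR_TRACKING=true set on the workflow, the flag ends up True and is
# passed into EnvVars as draft_pr_tracking.
os.environ["DRAFT_PR_TRACKING"] = "true"
print(get_bool_env_var("DRAFT_PR_TRACKING", False))  # True
```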
47 changes: 27 additions & 20 deletions issue_metrics.py
@@ -30,6 +30,7 @@
from markdown_writer import write_to_markdown
from most_active_mentors import count_comments_per_user, get_mentor_count
from search import get_owners_and_repositories, search_issues
from time_in_draft import get_stats_time_in_draft, measure_time_in_draft
from time_to_answer import get_stats_time_to_answer, measure_time_to_answer
from time_to_close import get_stats_time_to_close, measure_time_to_close
from time_to_first_response import (
@@ -112,20 +113,21 @@ def get_per_issue_metrics(
continue

issue_with_metrics = IssueWithMetrics(
issue.title, # type: ignore
issue.html_url, # type: ignore
issue.user["login"], # type: ignore
None,
None,
None,
None,
title=issue.title, # type: ignore
html_url=issue.html_url, # type: ignore
author=issue.user["login"], # type: ignore
)

# Check if issue is actually a pull request
pull_request, ready_for_review_at = None, None
if issue.issue.pull_request_urls: # type: ignore
pull_request = issue.issue.pull_request() # type: ignore
ready_for_review_at = get_time_to_ready_for_review(issue, pull_request)
if env_vars.draft_pr_tracking:
issue_with_metrics.time_in_draft = measure_time_in_draft(
issue=issue,
ready_for_review_at=ready_for_review_at,
)

if env_vars.hide_time_to_first_response is False:
issue_with_metrics.time_to_first_response = (
@@ -242,6 +244,7 @@ def main(): # pragma: no cover
average_time_to_first_response=None,
average_time_to_close=None,
average_time_to_answer=None,
average_time_in_draft=None,
average_time_in_labels=None,
num_issues_opened=None,
num_issues_closed=None,
@@ -266,6 +269,7 @@ def main(): # pragma: no cover
average_time_to_first_response=None,
average_time_to_close=None,
average_time_to_answer=None,
average_time_in_draft=None,
average_time_in_labels=None,
num_issues_opened=None,
num_issues_closed=None,
@@ -297,6 +301,7 @@ def main(): # pragma: no cover
stats_time_to_close = get_stats_time_to_close(issues_with_metrics)

stats_time_to_answer = get_stats_time_to_answer(issues_with_metrics)
stats_time_in_draft = get_stats_time_in_draft(issues_with_metrics)

num_mentor_count = 0
if enable_mentor_count:
@@ -308,23 +313,25 @@ def main(): # pragma: no cover

# Write the results to json and a markdown file
write_to_json(
issues_with_metrics,
stats_time_to_first_response,
stats_time_to_close,
stats_time_to_answer,
stats_time_in_labels,
num_issues_open,
num_issues_closed,
num_mentor_count,
search_query,
output_file,
issues_with_metrics=issues_with_metrics,
stats_time_to_first_response=stats_time_to_first_response,
stats_time_to_close=stats_time_to_close,
stats_time_to_answer=stats_time_to_answer,
stats_time_in_draft=stats_time_in_draft,
stats_time_in_labels=stats_time_in_labels,
num_issues_opened=num_issues_open,
num_issues_closed=num_issues_closed,
num_mentor_count=num_mentor_count,
search_query=search_query,
output_file=output_file,
)

write_to_markdown(
issues_with_metrics=issues_with_metrics,
average_time_to_first_response=stats_time_to_first_response,
average_time_to_close=stats_time_to_close,
average_time_to_answer=stats_time_to_answer,
average_time_in_draft=stats_time_in_draft,
average_time_in_labels=stats_time_in_labels,
num_issues_opened=num_issues_open,
num_issues_closed=num_issues_closed,
@@ -345,9 +352,9 @@ def main(): # pragma: no cover
shutil.move("issue_metrics_0.md", "issue_metrics.md")
print(
"Issue metrics markdown file is too large for GitHub issue body and has been \
split into multiple files. ie. issue_metrics.md, issue_metrics_1.md, etc. \
The full file is saved as issue_metrics_full.md\n\
See https://github.com/github/issue-metrics/blob/main/docs/dealing-with-large-issue-metrics.md"
split into multiple files. ie. issue_metrics.md, issue_metrics_1.md, etc. \
The full file is saved as issue_metrics_full.md\n\
See https://github.com/github/issue-metrics/blob/main/docs/dealing-with-large-issue-metrics.md"
)


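The new `time_in_draft` module itself is among the files whose diffs are not loaded on this page. As a hedged sketch only, the two helpers imported above could plausibly measure draft time as the gap between PR creation and the ready-for-review timestamp, then aggregate average, median, and 90th-percentile values. Everything below is inferred from how the functions are called in `issue_metrics.py`, not from the actual module.

```python
from datetime import datetime, timedelta
from typing import List, Optional


def measure_time_in_draft(issue, ready_for_review_at: Optional[datetime]) -> Optional[timedelta]:
    """Return how long a PR stayed in draft, or None if it was never marked ready."""
    if ready_for_review_at is None:
        return None
    # Assumes issue.created_at is an ISO 8601 string.
    created_at = datetime.fromisoformat(issue.created_at)
    return ready_for_review_at - created_at


def get_stats_time_in_draft(issues_with_metrics: List) -> Optional[dict]:
    """Aggregate avg/med/90p of time_in_draft across items that have it."""
    draft_seconds = sorted(
        item.time_in_draft.total_seconds()
        for item in issues_with_metrics
        if item.time_in_draft is not None
    )
    if not draft_seconds:
        return None

    def nearest_rank(percent: float) -> float:
        # Simple nearest-rank percentile over the sorted list.
        index = round(percent / 100 * (len(draft_seconds) - 1))
        return draft_seconds[index]

    return {
        "avg": timedelta(seconds=sum(draft_seconds) / len(draft_seconds)),
        "med": timedelta(seconds=nearest_rank(50)),
        "90p": timedelta(seconds=nearest_rank(90)),
    }
```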
77 changes: 53 additions & 24 deletions json_writer.py
@@ -2,12 +2,15 @@
Functions:
write_to_json(
issues_with_metrics: List[IssueWithMetrics],
average_time_to_first_response: timedelta,
average_time_to_close: timedelta,
average_time_to_answer: timedelta,
num_issues_opened: int,
num_issues_closed: int,
issues_with_metrics: Union[List[IssueWithMetrics], None],
stats_time_to_first_response: Union[dict[str, timedelta], None],
stats_time_to_close: Union[dict[str, timedelta], None],
stats_time_to_answer: Union[dict[str, timedelta], None],
stats_time_in_draft: Union[dict[str, timedelta], None],
stats_time_in_labels: Union[dict[str, dict[str, timedelta]], None],
num_issues_opened: Union[int, None],
num_issues_closed: Union[int, None],
num_mentor_count: Union[int, None],
search_query: str,
output_file: str,
) -> str:
@@ -28,6 +31,7 @@ def write_to_json(
stats_time_to_first_response: Union[dict[str, timedelta], None],
stats_time_to_close: Union[dict[str, timedelta], None],
stats_time_to_answer: Union[dict[str, timedelta], None],
stats_time_in_draft: Union[dict[str, timedelta], None],
stats_time_in_labels: Union[dict[str, dict[str, timedelta]], None],
num_issues_opened: Union[int, None],
num_issues_closed: Union[int, None],
@@ -40,37 +44,48 @@ def write_to_json(
json structure is like following
{
"average_time_to_first_response": "2 days, 12:00:00",
"average_time_to_close": "5 days, 0:00:00",
"average_time_to_answer": "1 day, 0:00:00",
"average_time_to_first_response": "None",
"average_time_to_close": "None",
"average_time_to_answer": "None",
"average_time_in_draft": "None",
"average_time_in_labels": {},
"median_time_to_first_response": "None",
"median_time_to_close": "None",
"median_time_to_answer": "None",
"median_time_in_draft": "None",
"median_time_in_labels": {},
"90_percentile_time_to_first_response": "None",
"90_percentile_time_to_close": "None",
"90_percentile_time_to_answer": "None",
"90_percentile_time_in_draft": "None",
"90_percentile_time_in_labels": {},
"num_items_opened": 2,
"num_items_closed": 1,
"num_items_closed": 0,
"num_mentor_count": 5,
"total_item_count": 2,
"issues": [
{
"title": "Issue 1",
"html_url": "https://github.com/owner/repo/issues/1",
"author": "author",
"time_to_first_response": "3 days, 0:00:00",
"time_to_close": "6 days, 0:00:00",
"author": "alice",
"time_to_first_response": "None",
"time_to_close": "None",
"time_to_answer": "None",
"label_metrics": {
"bug": "1 day, 16:24:12"
}
"time_in_draft": "None",
"label_metrics": {}
},
{
"title": "Issue 2",
"html_url": "https://github.com/owner/repo/issues/2",
"author": "author",
"time_to_first_response": "2 days, 0:00:00",
"time_to_close": "4 days, 0:00:00",
"time_to_answer": "1 day, 0:00:00",
"label_metrics": {
}
},
"author": "bob",
"time_to_first_response": "None",
"time_to_close": "None",
"time_to_answer": "None",
"time_in_draft": "None",
"label_metrics": {}
}
],
"search_query": "is:issue is:open repo:owner/repo"
"search_query": "is:issue repo:owner/repo"
}
"""
@@ -106,6 +121,16 @@ def write_to_json(
med_time_to_answer = stats_time_to_answer["med"]
p90_time_to_answer = stats_time_to_answer["90p"]

# time in draft
average_time_in_draft = None
med_time_in_draft = None
p90_time_in_draft = None
if stats_time_in_draft is not None:
average_time_in_draft = stats_time_in_draft["avg"]
med_time_in_draft = stats_time_in_draft["med"]
p90_time_in_draft = stats_time_in_draft["90p"]

# time in labels
average_time_in_labels = {}
med_time_in_labels = {}
p90_time_in_labels = {}
@@ -122,14 +147,17 @@ def write_to_json(
"average_time_to_first_response": str(average_time_to_first_response),
"average_time_to_close": str(average_time_to_close),
"average_time_to_answer": str(average_time_to_answer),
"average_time_in_draft": str(average_time_in_draft),
"average_time_in_labels": average_time_in_labels,
"median_time_to_first_response": str(med_time_to_first_response),
"median_time_to_close": str(med_time_to_close),
"median_time_to_answer": str(med_time_to_answer),
"median_time_in_draft": str(med_time_in_draft),
"median_time_in_labels": med_time_in_labels,
"90_percentile_time_to_first_response": str(p90_time_to_first_response),
"90_percentile_time_to_close": str(p90_time_to_close),
"90_percentile_time_to_answer": str(p90_time_to_answer),
"90_percentile_time_in_draft": str(p90_time_in_draft),
"90_percentile_time_in_labels": p90_time_in_labels,
"num_items_opened": num_issues_opened,
"num_items_closed": num_issues_closed,
@@ -152,6 +180,7 @@ def write_to_json(
"time_to_first_response": str(issue.time_to_first_response),
"time_to_close": str(issue.time_to_close),
"time_to_answer": str(issue.time_to_answer),
"time_in_draft": str(issue.time_in_draft),
"label_metrics": formatted_label_metrics,
}
)
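To show where the new fields land in the output, here is a small hedged example of reading a previously generated metrics JSON file and pulling out the draft statistics. The field names follow the sample structure in the docstring above; the file name `issue_metrics.json` is an assumption.

```python
import json

with open("issue_metrics.json", encoding="utf-8") as metrics_file:  # file name is an assumption
    metrics = json.load(metrics_file)

print("Average time in draft:", metrics["average_time_in_draft"])
print("Median time in draft:", metrics["median_time_in_draft"])
print("90th percentile time in draft:", metrics["90_percentile_time_in_draft"])

for item in metrics["issues"]:
    # Items that were never in draft carry the string "None", matching
    # str(issue.time_in_draft) in write_to_json.
    print(item["html_url"], item["time_in_draft"])
```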
(Diffs for the remaining changed files in this commit did not load and are not shown here.)
