Class: ForceMergeReport
- Inherits:
-
Object
- Object
- ForceMergeReport
- Defined in:
- lib/github_repo_statistics/force_merge_report.rb
Instance Method Summary collapse
-
#initialize(token:, repo:, branch:, duration:, ci:) ⇒ ForceMergeReport
constructor
A new instance of ForceMergeReport.
- #report ⇒ Object
Constructor Details
#initialize(token:, repo:, branch:, duration:, ci:) ⇒ ForceMergeReport
Returns a new instance of ForceMergeReport.
7 8 9 10 11 12 13 |
# File 'lib/github_repo_statistics/force_merge_report.rb', line 7

# Builds a reporter that scans recent closed pull requests for
# force-merges (merges whose required checks did not pass).
#
# @param token [String] GitHub API access token
# @param repo [String] repository identifier, e.g. "owner/name"
# @param branch [String] branch name (stored; not used in this excerpt — verify against full file)
# @param duration [Integer] size of the reporting window, in days
# @param ci [Boolean] when true, per-PR details are not printed to stdout
def initialize(token:, repo:, branch:, duration:, ci:)
  @token, @repo, @branch = token, repo, branch
  @duration_in_days = duration
  @ci = ci
end
Instance Method Details
#report ⇒ Object
15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 |
# File 'lib/github_repo_statistics/force_merge_report.rb', line 15

# Prints a weekly force-merge report for the last @duration_in_days days
# and, when the BQ_CREDENTIALS env var is set, upserts each week's
# counters into BigQuery.
#
# A PR counts as "force-merged" when it was merged although one of the
# required checks ('UI Tests', 'Unit Tests', 'Mergeable: Size check',
# 'SonarQube Code Analysis') did not report success.
#
# @return [void]
def report
  client = Octokit::Client.new(access_token: @token)
  client.auto_paginate = true

  # All issues/PRs closed within the reporting window.
  since = Time.now - (@duration_in_days * 24 * 60 * 60)
  pull_requests = client.list_issues(@repo, state: 'closed', since: since)

  # Bucket closed PRs by calendar week ('%Y-%W'); make sure the current
  # week is present even if nothing was closed in it yet.
  weekly_pull_requests = pull_requests.group_by { |pr| pr.closed_at.strftime('%Y-%W') }
  current_week = Time.now.strftime('%Y-%W')
  weekly_pull_requests[current_week] ||= []

  # Per-week counters; :workflows maps a failed check name to its count.
  weekly_summaries = Hash.new { |hash, key| hash[key] = { total: 0, failed: 0, workflows: Hash.new(0) } }

  # Integer division truncates: a window shorter than 7 days used to
  # yield weeks == 0, and the old `keys.sort[-0..]` slice then selected
  # EVERY week ever seen. Clamp to at least one week; `last(weeks)` also
  # copes with fewer buckets than requested (the old code needed `|| []`).
  weeks = [@duration_in_days / 7, 1].max
  weekly_pull_requests.keys.sort.last(weeks).each do |week|
    weekly_pull_requests[week].each do |issue|
      pr = client.pull_request(@repo, issue.number)
      # Closed-but-unmerged PRs are irrelevant here.
      next unless pr.merged_at

      failed_checks = failed_required_checks(client, pr)

      weekly_summaries[week][:total] += 1
      next if failed_checks.empty?

      weekly_summaries[week][:failed] += 1
      failed_checks.each do |check|
        workflow_name = check.name || check.context
        weekly_summaries[week][:workflows][workflow_name] += 1
      end

      # Per-PR details are noise on CI, so only print them locally.
      print_force_merged_pr(pr, failed_checks) unless @ci
    end
  end

  puts 'Weekly Summary:'
  weekly_summaries.each do |week, summary|
    print_week_summary(week, summary)
    # ENV['BQ_CREDENTIALS'] = `cat /Users/serghei.moret/.config/gcloud/application_default_credentials.json`
    export_week_to_bigquery(week, summary) if ENV['BQ_CREDENTIALS']
  end
end

private

# Returns the required checks on the PR's head commit that did not
# succeed. Combines check runs (have #name/#conclusion) with legacy
# commit statuses (have #context/#state), deduplicated to the latest
# entry per check.
def failed_required_checks(client, pr)
  check_runs = client.check_runs_for_ref(@repo, pr.head.sha).check_runs.uniq(&:name)
  statuses = client.statuses(@repo, pr.head.sha).uniq(&:context)

  (check_runs + statuses)
    .reject { |check| check.conclusion == 'success' || check.state == 'success' }
    .select do |check|
      ['UI Tests', 'Unit Tests'].include?(check.context) ||
        ['Mergeable: Size check', 'SonarQube Code Analysis'].include?(check.name)
    end
end

# Prints the details of one force-merged PR and its failed checks.
def print_force_merged_pr(pr, failed_checks)
  puts "PR ##{pr.number} - Merged at: #{pr.merged_at}"
  puts 'Failed Checks:'
  failed_checks.each do |check|
    puts "- Workflow: #{check.name || check.context}, Conclusion: #{check.conclusion || check.state}"
  end
  puts '---'
  puts
end

# Prints one week's aggregate counters and per-workflow failure counts.
def print_week_summary(week, summary)
  puts "Calendar week #{week}: Total PRs: #{summary[:total]}, Force-merged PRs: #{summary[:failed]}"
  puts 'Failed Workflows:'
  summary[:workflows].each do |workflow, count|
    puts " - #{workflow}: #{count}"
  end
end

# Upserts one week's counters into the `force_merges` table in the
# `github_data` BigQuery dataset. Requires BQ_CREDENTIALS to hold the
# service-account JSON. Interpolated values are operator-controlled
# config and integer counters, not end-user input; still, consider
# query parameters (dataset.query(sql, params: ...)) hardening.
def export_week_to_bigquery(week, summary)
  # Lazy-loaded: only needed when exporting is enabled.
  require 'google/cloud/bigquery'
  require 'json'

  bigquery = Google::Cloud::Bigquery.new(
    project_id: 'hellofresh-android',
    credentials: JSON.parse(ENV['BQ_CREDENTIALS'])
  )
  dataset = bigquery.dataset 'github_data'

  workflows = summary[:workflows] # Hash.new(0): missing checks read as 0
  query = <<~SQL
    MERGE INTO force_merges AS target
    USING (SELECT '#{week}' AS calendar_week, '#{@repo}' AS platform) AS source
    ON target.calendar_week = source.calendar_week AND target.platform = source.platform
    WHEN MATCHED THEN
      UPDATE SET target.force_merges_count = #{summary[:failed]},
                 target.ui_tests_count = #{workflows['UI Tests']},
                 target.unit_tests_count = #{workflows['Unit Tests']},
                 target.size_check_count = #{workflows['Mergeable: Size check']},
                 target.sonarqube_count = #{workflows['SonarQube Code Analysis']},
                 target.total_prs = #{summary[:total]},
                 target.platform = '#{@repo}'
    WHEN NOT MATCHED THEN
      INSERT (calendar_week, force_merges_count, ui_tests_count, unit_tests_count, size_check_count, sonarqube_count, total_prs, platform)
      VALUES ('#{week}', #{summary[:failed]}, #{workflows['UI Tests']}, #{workflows['Unit Tests']}, #{workflows['Mergeable: Size check']}, #{workflows['SonarQube Code Analysis']}, #{summary[:total]}, '#{@repo}');
  SQL

  dataset.query(query)
end