-
Notifications
You must be signed in to change notification settings - Fork 26
/
bot.rb
511 lines (419 loc) · 20.5 KB
/
bot.rb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
require 'colored'
require 'date'
require 'excon'
require 'json'
require 'octokit'
require 'pry'
require "graphql/client"
require "graphql/client/http"
module Fastlane
# Housekeeping bot for the fastlane repository: warns/auto-closes stale
# issues, locks old conversations, labels PRs that need attention, and
# announces merged/released PRs.
class Bot
# Repository the bot operates on, plus its owner/name parts for GraphQL queries.
SLUG = "fastlane/fastlane"
REPOSITORY_OWNER = SLUG.split("/")[0]
REPOSITORY_NAME = SLUG.split("/")[1]
ISSUE_WARNING = 1 # in months: inactivity before the bot posts a stale warning
ISSUE_CLOSED = 1 # plus the x months from ISSUE_WARNING
ISSUE_LOCK = 4 # lock all issues with no activity within the last 4 months
NEEDS_ATTENTION_PR_LIFESPAN_DAYS = 14 # threshold for marking a PR as needing attention
# Labels
AWAITING_REPLY = "status: waiting-for-reply"
AUTO_CLOSED = "status: auto-closed"
NEEDS_ATTENTION = 'status: needs-attention'
RELEASED = 'status: released'
INCLUDED_IN_NEXT_RELEASE = 'status: included-in-next-release'
HAS_PR = 'status: has-pr'
# Issue closing keywords: https://help.github.com/en/articles/closing-issues-using-keywords
ISSUE_CLOSING_KEYWORDS = [
"close",
"closes",
"closed",
"fix",
"fixes",
"fixed",
"resolve",
"resolves",
"resolved"
]
# Incoming Slack webhook used for all bot notifications.
ACTION_CHANNEL_SLACK_WEB_HOOK_URL = ENV['ACTION_CHANNEL_SLACK_WEB_HOOK_URL']
# Search URL linking to all open PRs carrying the needs-attention label.
NEEDS_ATTENTION_PR_QUERY = "https://github.com/#{SLUG}/pulls?q=is%3Aopen+is%3Apr+label%3A%22#{NEEDS_ATTENTION}%22"
attr_reader :logger
# Low-priority ("trivial") Slack pings are suppressed on weekends.
#
# @return [Boolean] true on weekdays, false on Saturday/Sunday
def self.should_send_trivial_slack_notification?
  weekend = Date.today.saturday? || Date.today.sunday?
  !weekend
end
# @param logger [Logger] sink for all progress/status output of the bot
def initialize(logger)
@logger = logger
end
# Lazily built, memoized Octokit client authenticated with the
# GITHUB_API_TOKEN environment variable.
def client
  return @client if @client

  @client = Octokit::Client.new(access_token: ENV["GITHUB_API_TOKEN"])
end
# Entry point: walks every issue or PR of the repository (the issues API
# returns both) and applies the bot's policies to each item.
#
# @param process [Symbol] :issues to handle plain issues, :prs for pull requests
def start(process: :issues)
# Heroku is already complaining about memory size, and auto_paginate
# makes the client bring all of the objects into memory at once. This
# can only continue to get worse, since we look at every issue ever.
client.auto_paginate = false
logger.info("Fetching release information for '#{SLUG}'...")
# We only want to consider the 5 most recent releases, so no sense downloading more data than that.
# We consider the 5 most recent in case we have done multiple releases since the last run of the bot.
releases = client.releases(SLUG, per_page: 5)
prs_to_releases = map_prs_to_releases(releases)
logger.info("Fetching issues and PRs from '#{SLUG}'...")
# Collected across all pages so the Slack notification happens once at the end.
needs_attention_prs = []
# Doing pagination ourself is a pain, but it's important for keeping a
# reasonable memory footprint
page = 1
issues_page = fetch_issues(page)
while issues_page && issues_page.any?
# It's important that we check this immediately, as calls we make during
# processing will affect the last_response
has_next_page = !!client.last_response.rels[:next]
issues_page.each do |issue|
# An item is a PR exactly when issue.pull_request is present.
if process == :issues && issue.pull_request.nil?
logger.info("Investigating issue ##{issue.number}...")
process_open_issue(issue) if issue.state == "open"
process_closed_issue(issue) if issue.state == "closed"
elsif process == :prs && issue.pull_request
logger.info("Investigating PR ##{issue.number}...")
process_open_pr(issue, needs_attention_prs) if issue.state == "open"
process_closed_pr(issue, prs_to_releases) if issue.state == "closed" # includes merged
end
end
page += 1
# If there's a next page, keep going
issues_page = has_next_page ? fetch_issues(page) : nil
end
fetch_and_process_pinned_issues()
notify_action_channel_about(needs_attention_prs)
logger.info("[SUCCESS] I worked through issues / PRs, much faster than human beings, bots will take over")
end
# Fetches one page (100 items) of issues AND pull requests in any state.
#
# @param page [Integer] 1-based page index
# @return [Array<Sawyer::Resource>] the page of issue objects
def fetch_issues(page = 1)
# issues includes PRs, and since the pull_requests API doesn't include
# labels, it's actually important that we query everything this way!
client.issues(SLUG, per_page: 100, state: "all", page: page)
end
# Applies the open-issue policies: skip issues that already have a PR,
# run the stale-issue workflow, fix malformed checkboxes, and (on fresh
# issues only) remind authors to attach their `fastlane env` output.
def process_open_issue(issue)
  # Issues with an associated PR need no further nudging from the bot.
  if has_label?(issue, HAS_PR)
    logger.info("https://github.com/#{SLUG}/issues/#{issue.number} has PR 👍")
    return
  end

  pending_replies = []
  process_inactive(issue)
  # Existing comments may already contain bot replies, so stop here.
  return if issue.comments > 0

  pending_replies << process_env_check(issue)

  fixed_body = fix_checkboxes(issue.body)
  client.update_issue(SLUG, issue.number, issue.title, fixed_body) unless fixed_body.nil?

  pending_replies.each do |reply|
    client.add_comment(SLUG, issue.number, reply) if reply.to_s.length > 0
    smart_sleep
  end
end
# Closed issues only need their conversation locked once they go stale.
def process_closed_issue(issue)
lock_old_issues(issue)
end
# Flags long-lived open PRs with the needs-attention label, collects them
# for the Slack summary, and fixes malformed checkboxes in the body.
#
# @param pr [Sawyer::Resource] an open PR (issue representation)
# @param needs_attention_prs [Array] accumulator mutated in place
def process_open_pr(pr, needs_attention_prs)
  age_in_days = (Time.now - pr.created_at) / 60.0 / 60.0 / 24.0
  already_flagged = has_label?(pr, NEEDS_ATTENTION)

  if already_flagged || age_in_days > NEEDS_ATTENTION_PR_LIFESPAN_DAYS
    add_needs_attention_to(pr) unless already_flagged
    needs_attention_prs << pr
  end

  fixed_body = fix_checkboxes(pr.body)
  client.update_issue(SLUG, pr.number, pr.title, fixed_body) unless fixed_body.nil?
end
# Post-processing for closed (including merged) PRs: clears the
# needs-attention label, announces release or merge status, and locks
# stale conversations. Release takes precedence over the merge notice.
#
# @param pr [Sawyer::Resource] a closed PR (issue representation)
# @param prs_to_releases [Hash{String=>String}] PR number -> release tag
def process_closed_pr(pr, prs_to_releases)
remove_needs_attention_from(pr) if has_label?(pr, NEEDS_ATTENTION)
# When we mark something as released, it doesn't update the already in-memory representation
# of that PR. So we need to keep track of whether we just marked as released, so that we don't
# immediately also mark it as merged.
just_marked_released = false
if prs_to_releases.key?(pr.number.to_s) && !has_label?(pr, RELEASED)
mark_as_released(pr, prs_to_releases)
just_marked_released = true
end
# If we just marked this PR as released, we can skip saying that it was merged
if !just_marked_released && should_mark_as_merged?(pr)
mark_as_merged(pr)
end
# Lock old, inactive PRs (same as with issues)
# only for PRs that are merged of course
lock_old_issues(pr)
end
# Decides whether a closed PR should receive the "merged" congratulation.
# Uses the cheap closed_at field first to avoid an extra API request for
# the merged_at timestamp, and only announces PRs merged within 24 hours
# that are not already labeled as released/included-in-next-release.
#
# @param pr [Sawyer::Resource] a closed PR (issue representation)
# @return [Boolean]
def should_mark_as_merged?(pr)
  now = Time.now

  # In order to avoid marking all PRs since the beginning of time, we need to
  # make sure that the PR was merged recently. However, merged_at is not on
  # the basic "issue" object, so first filter on closed_at (which is).
  hours_since_close = (now - pr.closed_at) / 60.0 / 60.0
  return false if hours_since_close >= 24

  # Only now pay for the extra request that carries merged_at.
  details = client.pull_request(SLUG, pr.number)
  merged_at = details.merged_at
  return false unless merged_at # closed without merging

  # Final safety check: skip PRs that were merged, but not recently.
  hours_since_merge = (now - merged_at) / 60.0 / 60.0
  return false if hours_since_merge >= 24

  !has_label?(pr, RELEASED) && !has_label?(pr, INCLUDED_IN_NEXT_RELEASE)
end
# Posts a weekday Slack summary linking to all open PRs that carry the
# needs-attention label. No-op on weekends or when the list is empty.
#
# @param needs_attention_prs [Array<Sawyer::Resource>]
def notify_action_channel_about(needs_attention_prs)
  return unless Bot.should_send_trivial_slack_notification?
  return if needs_attention_prs.empty?

  logger.info("Notifying the Slack room about PRs that need attention...")

  count = needs_attention_prs.size
  noun = count == 1 ? "PR" : "PRs"
  verb = count == 1 ? "has" : "have"
  query_link = "<#{NEEDS_ATTENTION_PR_QUERY}|#{count} #{noun}>"
  payload = {
    text: "#{query_link} #{verb} been alive for more than #{NEEDS_ATTENTION_PR_LIFESPAN_DAYS} days."
  }.to_json

  response = Excon.post(ACTION_CHANNEL_SLACK_WEB_HOOK_URL, body: payload, headers: { "Content-Type" => "application/json" })
  outcome =
    if response.status == 200
      "Successfully notified the Slack room about PRs that need attention"
    else
      "Failed to notify the Slack room about PRs that need attention"
    end
  logger.info(outcome)
end
# Login of the account the bot is authenticated as; used to detect
# whether the last reply on an issue came from the bot itself.
def myself
client.user.login
end
# Whether the issue/PR carries a label with the given name.
#
# @param issue [Sawyer::Resource] must respond to labels? / labels
# @param label_name [String]
# @return [Boolean]
def has_label?(issue, label_name)
  return false unless issue.labels?

  issue.labels.any? { |label| label.name == label_name }
end
# Applies the needs-attention label to the given issue/PR.
def add_needs_attention_to(issue)
logger.info("Adding #{NEEDS_ATTENTION} label on ##{issue.number}")
client.add_labels_to_an_issue(SLUG, issue.number, [NEEDS_ATTENTION])
end
# Removes the needs-attention label from the given issue/PR.
def remove_needs_attention_from(issue)
logger.info("Removing #{NEEDS_ATTENTION} label on ##{issue.number}")
client.remove_label(SLUG, issue.number, NEEDS_ATTENTION)
end
# Congratulates the author of a freshly merged PR and labels the PR as
# included in the next release.
#
# @param pr [Sawyer::Resource] a merged PR (issue representation)
def mark_as_merged(pr)
  # .chomp drops the heredoc's trailing newline so the comment body matches
  # the previous array-join format exactly.
  congrats = <<~COMMENT.chomp
    Hey @#{pr.user.login} :wave:

    Thank you for your contribution to _fastlane_ and congrats on getting this pull request merged :tada:
    The code change now lives in the `master` branch, however it wasn't released to [RubyGems](https://rubygems.org/gems/fastlane) yet.
    We usually ship about once a week, and your PR will be included in the next one.

    Please let us know if this change requires an immediate release by adding a comment here :+1:
    We'll notify you once we shipped a new release with your changes :rocket:
  COMMENT
  client.add_comment(SLUG, pr.number, congrats)
  client.add_labels_to_an_issue(SLUG, pr.number, [INCLUDED_IN_NEXT_RELEASE])
end
# Swaps included-in-next-release for the released label, announces the
# release on the PR, and — if the PR's description references an issue it
# closes — also notifies that issue.
#
# @param pr [Sawyer::Resource] a closed PR (issue representation)
# @param prs_to_releases [Hash{String=>String}] PR number -> release tag
def mark_as_released(pr, prs_to_releases)
version = prs_to_releases[pr.number.to_s]
release_url = "https://github.com/#{SLUG}/releases/tag/#{version}"
logger.info("Marking #{pr.number} as having been released in version #{version}")
# This doesn't wind up modifying the in-memory object, so we will still find this label applied
# when we check for it in the next step.
client.remove_label(SLUG, pr.number, INCLUDED_IN_NEXT_RELEASE) if has_label?(pr, INCLUDED_IN_NEXT_RELEASE)
client.add_labels_to_an_issue(SLUG, pr.number, [RELEASED])
client.add_comment(SLUG, pr.number, "Congratulations! :tada: This was released as part of [_fastlane_ #{version}](#{release_url}) :rocket:")
issue_number = referenced_issue_number?(pr)
if issue_number
logger.info("Adding a comment to the issue #{issue_number} that pull request #{pr.number} has been released")
body = []
body << "The pull request ##{pr.number} that closed this issue was merged and released as part of [_fastlane_ #{version}](#{release_url}) :rocket:"
body << "Please let us know if the functionality works as expected as a reply here. If it does not, please open a new issue. Thanks!"
client.add_comment(SLUG, issue_number, body.join("\n"))
end
smart_sleep
end
# Lock old, inactive conversations.
#
# Locks the conversation on an issue/PR that has seen no activity for at
# least ISSUE_LOCK months, so stale threads don't attract drive-by comments.
#
# @param issue [Sawyer::Resource] an issue or PR (issue representation)
def lock_old_issues(issue)
  return if issue.locked # already locked, nothing to do here
  diff_in_months = (Time.now - issue.updated_at) / 60.0 / 60.0 / 24.0 / 30.0
  return if diff_in_months < ISSUE_LOCK
  logger.info("Locking conversations for https://github.com/#{SLUG}/issues/#{issue.number} since it hasn't been updated in #{diff_in_months.round} months")
  # Use Octokit instead of shelling out to curl: the previous implementation
  # exposed GITHUB_API_TOKEN on the process command line (visible in `ps`)
  # and silently discarded any failure. The preview media type is kept for
  # compatibility with the (formerly beta) issue-locking API:
  # https://developer.github.com/changes/2016-02-11-issue-locking-api/
  client.lock_issue(SLUG, issue.number, accept: "application/vnd.github.the-key-preview+json")
  logger.info("Done locking the conversation")
  smart_sleep
end
# Responsible for commenting to inactive issues, and closing them after a while
#
# State machine: after ISSUE_WARNING months of inactivity the bot posts a
# warning and applies AWAITING_REPLY; after ISSUE_CLOSED further months
# with no reply from anyone but the bot, the issue is closed and labeled
# AUTO_CLOSED. A reply from any other user just clears the label.
def process_inactive(issue)
diff_in_months = (Time.now - issue.updated_at) / 60.0 / 60.0 / 24.0 / 30.0
warning_sent = !!issue.labels.find { |a| a.name == AWAITING_REPLY }
if warning_sent && diff_in_months > ISSUE_CLOSED
# We sent off a warning, but we have to check if the user replied
if last_responding_user(issue) == myself
# No reply from the user, let's close the issue
logger.info("https://github.com/#{SLUG}/issues/#{issue.number} (#{issue.title}) is #{diff_in_months.round(1)} months old, closing now")
body = []
body << "This issue will be auto-closed because there hasn't been any activity for a few months. Feel free to [open a new one](https://github.com/#{SLUG}/issues/new) if you still experience this problem :+1:"
client.add_comment(SLUG, issue.number, body.join("\n\n"))
client.close_issue(SLUG, issue.number)
client.add_labels_to_an_issue(SLUG, issue.number, [AUTO_CLOSED])
else
# User replied, let's remove the label
logger.info("https://github.com/#{SLUG}/issues/#{issue.number} (#{issue.title}) was replied to by a different user")
client.remove_label(SLUG, issue.number, AWAITING_REPLY)
end
smart_sleep
elsif diff_in_months > ISSUE_WARNING
# Don't warn twice.
return if issue.labels.find { |a| a.name == AWAITING_REPLY }
logger.info("https://github.com/#{SLUG}/issues/#{issue.number} (#{issue.title}) is #{diff_in_months.round(1)} months old, pinging now")
body = []
body << "There hasn't been any activity on this issue recently. Due to the high number of incoming GitHub notifications, we have to clean some of the old issues, as many of them have already been resolved with the latest updates."
body << "Please make sure to update to the latest `fastlane` version and check if that solves the issue. Let us know if that works for you by adding a comment :+1:"
body << "Friendly reminder: contributions are always welcome! Check out [CONTRIBUTING.md](https://github.com/fastlane/fastlane/blob/master/CONTRIBUTING.md) for more information on how to help with `fastlane` and feel free to tackle this issue yourself :muscle:"
body << "\n\nThis issue will be auto-closed if there is no reply within #{months(ISSUE_CLOSED)}."
client.add_comment(SLUG, issue.number, body.join("\n\n"))
client.add_labels_to_an_issue(SLUG, issue.number, [AWAITING_REPLY])
smart_sleep
end
end
# Remind people to include `fastlane env`.
#
# Feature requests (marked by the "### Feature Request" template heading)
# and issues that already contain env output ("Loaded fastlane plugins"
# appears in it) are exempt.
#
# @param issue [Sawyer::Resource]
# @return [String, nil] reminder text to post, or nil if nothing is needed
def process_env_check(issue)
  combined_text = issue.body + issue.title
  return nil if combined_text.include?("Loaded fastlane plugins") || combined_text.include?("### Feature Request")

  logger.info("https://github.com/#{SLUG}/issues/#{issue.number} (#{issue.title}) seems to be missing env report")
  reminder = []
  reminder << "It seems like you have not included the output of `fastlane env`"
  reminder << "To make it easier for us help you resolve this issue, please update the issue to include the output of `fastlane env` :+1:"
  reminder.join("\n\n")
end
# Queries the GraphQL API for the repository's pinned issues and pings
# Slack for any pinned issue that has already been closed.
def fetch_and_process_pinned_issues
  logger.info("Fetching pinned issues from '#{SLUG}'...")
  result = GitHubAPI::Client.query(GitHubAPI::Query::PINNED_ISSUES, variables: { owner: REPOSITORY_OWNER, name: REPOSITORY_NAME })
  result.data.repository.pinned_issues.edges.each do |edge|
    issue = edge.node.issue
    slack_about_closed_pinned_issues(issue) if issue.closed
  end
end
# Warns the Slack channel that a closed issue is still pinned (weekdays only).
#
# @param pinned_issue [Object] GraphQL issue node with number/url fields
def slack_about_closed_pinned_issues(pinned_issue)
  return unless Bot.should_send_trivial_slack_notification?

  logger.info("Notifying the Slack room about pinned issue #{pinned_issue.number} that is closed...")
  payload = {
    text: "Caution: Item ##{pinned_issue.number} is still a pinned issue although it was recently closed: #{pinned_issue.url}"
  }.to_json
  response = Excon.post(ACTION_CHANNEL_SLACK_WEB_HOOK_URL, body: payload, headers: { "Content-Type" => "application/json" })
  outcome =
    if response.status == 200
      "Successfully notified the Slack room about pinned issue that is closed"
    else
      "Failed to notify the Slack room about pinned issue that is closed. Response status: #{response.status}. Response body: #{response.body}"
    end
  logger.info(outcome)
end
# Login of the author of the most recent comment on the issue.
#
# Relies on client.last_response reflecting the issue_comments call, so
# no other API request may happen between those two lines.
def last_responding_user(issue)
first_page = client.issue_comments(SLUG, issue.number)
link_to_last_page = client.last_response.rels[:last]
last_page = link_to_last_page.get.data if link_to_last_page
# NOTE(review): assumes at least one comment exists — callers invoke this
# only after the bot's own warning comment has been posted.
return (last_page || first_page).last.user.login
end
# Short pause between write operations so the bot stays well within
# GitHub's rate limits.
def smart_sleep
sleep 5
end
# Create a hash of PR numbers (as strings) to release tag names indicating in which release
# a given PR was mentioned in the release notes. For example:
#
#   {
#     "8594"=>"2.22.0",
#     "8592"=>"2.22.0",
#     "8593"=>"2.21.0"
#   }
#
# Draft/prerelease entries and releases without a body or tag are ignored.
#
# @param releases [Array<Sawyer::Resource>]
# @return [Hash{String=>String}]
def map_prs_to_releases(releases)
  published = releases.reject { |release| release.draft || release.prerelease || release.body.nil? || release.tag_name.nil? }
  published.each_with_object({}) do |release, mapping|
    collect_pr_references_from_release(release, mapping)
  end
end
# Populate the provided prs_to_releases hash with the PR references found
# in the given release's release notes (scanned line by line).
def collect_pr_references_from_release(release, prs_to_releases)
  release.body.each_line do |line|
    collect_pr_references_from_line(line, release.tag_name, prs_to_releases)
  end
end
# Records every PR referenced on one release-notes line into prs_to_releases.
#
# matches:
#   (#8324)
#   (#8324,#8325)
#   (#8324, #8325)
#   (#8324, #8325, #8326)
#   etc.
# captures inside the parens: #8324, #8325, #8326
#
# @param line [String] one line of release notes
# @param release_name [String] release tag, e.g. "2.22.0"
# @param prs_to_releases [Hash{String=>String}] mutated in place
def collect_pr_references_from_line(line, release_name, prs_to_releases)
  match = line.match(/\((#\d+(?:,\s*#\d+)*)\)/)
  return unless match

  match[1].split(/,\s*/).each do |reference|
    prs_to_releases[reference.sub('#', '')] = release_name
  end
end
# Normalizes malformed markdown checkboxes (e.g. "- [ x]") to "- [x]".
#
# @param text [String, nil] issue/PR body
# @return [String, nil] the corrected text, or nil when text is nil or
#   nothing needed fixing (callers skip the update API call on nil)
def fix_checkboxes(text)
  return nil unless text

  cleaned = text.gsub(/^- \[\s*\S+\s*\]/, "- [x]")
  cleaned == text ? nil : cleaned
end
# Checks if a PR's description contains an issue reference.
# This only works for issues in the same repository.
#
# Supports both `fixes #1234` and
# `closes https://github.com/OWNER/NAME/issues/1234` forms.
#
# @param pr [Sawyer::Resource] a PR (issue representation)
# @return [String, nil] the referenced issue number, or nil
def referenced_issue_number?(pr)
  return unless pr.body

  keyword_pattern = ISSUE_CLOSING_KEYWORDS.join('|')

  # `fixes #1234` style reference
  short_reference = pr.body[/(#{keyword_pattern}) #\d{1,}/i, 0]
  return short_reference[/#\d{1,}/i, 0].tr('#', '') if short_reference

  # `closes https://github.com/OWNER/NAME/issues/1234` style reference
  url_reference = pr.body[/(#{keyword_pattern}) https:\/\/github.com\/#{REPOSITORY_OWNER}\/#{REPOSITORY_NAME}\/issues\/\d{1,}/i, 0]
  url_reference && url_reference.split('/').last
end
# Human-readable month count with correct pluralization, e.g. "1 month".
#
# @param number [Integer]
# @return [String]
def months(number)
  unit = (number == 1 ? "month" : "months")
  "#{number} #{unit}"
end
end
end
# GraphQL (v4) access to the GitHub API, used for data the REST client
# doesn't expose — currently only the repository's pinned issues.
module GitHubAPI
SCHEMA_PATH = "schema.json"
# Transport for all GraphQL calls; the elektra preview media type enables
# the pinnedIssues field while it is in preview.
HTTP = GraphQL::Client::HTTP.new("https://api.github.com/graphql") do
def headers(context)
{
"Accept" => "application/vnd.github.elektra-preview+json",
"Authorization" => "Bearer #{ENV["GITHUB_API_TOKEN"]}"
}
end
end
# NOTE(review): loading the schema here performs a network round-trip at
# require time — confirm that's acceptable for every entry point.
Schema = GraphQL::Client.load_schema(HTTP)
Client = GraphQL::Client.new(schema: Schema, execute: HTTP)
# Maintenance helper: dumps the remote schema to SCHEMA_PATH.
def self.dump_schema()
GraphQL::Client.dump_schema(HTTP, SCHEMA_PATH)
end
class Query
# First 3 pinned issues with just the fields the bot needs.
PINNED_ISSUES = GitHubAPI::Client.parse <<-'GRAPHQL'
query($owner: String!, $name: String!) {
repository(owner:$owner, name:$name) {
pinnedIssues(first: 3) {
edges {
node {
issue {
number
closed
url
}
}
}
}
}
}
GRAPHQL
end
end