mozilla / relman-auto-nag / #5138

04 Jul 2024 04:58PM CUT
Coverage: 21.835% (-0.02%) from 21.859%

Build #5138 (push), reported via coveralls-python
benjaminmah: Added check if the dependencies have been landed

716 of 3604 branches covered (19.87%)
0 of 8 new or added lines in 1 file covered (0.0%)
1 existing line in 1 file now uncovered
1932 of 8848 relevant lines covered (21.84%)
0.22 hits per line
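The summary figures are straight ratios of the counts above; a quick sanity check in plain Python (the "hits per line" comparison assumes each covered line was hit exactly once, which this report does not state):

relevant_lines, covered_lines = 8848, 1932
branches, covered_branches = 3604, 716

print(f"line coverage:   {covered_lines / relevant_lines:.3%}")   # 21.835%
print(f"branch coverage: {covered_branches / branches:.2%}")      # 19.87%
# matches "0.22 hits per line" if every covered line was hit exactly once
print(f"hits per line:   {covered_lines / relevant_lines:.2f}")   # 0.22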

Source File: /bugbot/rules/not_landed.py (0.0% covered)

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import base64
import random
import re

from libmozdata import utils as lmdutils
from libmozdata.bugzilla import Bugzilla, BugzillaUser
from libmozdata.phabricator import (
    PhabricatorAPI,
    PhabricatorBzNotFoundException,
    PhabricatorRevisionNotFoundException,
)

from bugbot import utils
from bugbot.bzcleaner import BzCleaner

PHAB_URL_PAT = re.compile(r"https://phabricator\.services\.mozilla\.com/D([0-9]+)")


class NotLanded(BzCleaner):
    def __init__(self):
        super(NotLanded, self).__init__()
        self.nweeks = utils.get_config(self.name(), "number_of_weeks", 2)
        self.nyears = utils.get_config(self.name(), "number_of_years", 2)
        self.phab = PhabricatorAPI(utils.get_login_info()["phab_api_key"])
        self.extra_ni = {}

    def description(self):
        return "Open bugs with no activity for {} weeks and a r+ patch which hasn't landed".format(
            self.nweeks
        )

    def has_assignee(self):
        return True

    def get_extra_for_template(self):
        return {"nweeks": self.nweeks}

    def get_extra_for_needinfo_template(self):
        self.extra_ni.update(self.get_extra_for_template())
        return self.extra_ni

    def columns(self):
        return ["id", "summary", "assignee"]

    def handle_bug(self, bug, data):
        if self.has_bot_set_ni(bug):
            return None

        bugid = str(bug["id"])
        assignee = bug.get("assigned_to", "")
        if utils.is_no_assignee(assignee):
            assignee = ""
            nickname = ""
        else:
            nickname = bug["assigned_to_detail"]["nick"]

        data[bugid] = {
            "assigned_to": assignee,
            "nickname": nickname,
            "deps": set(bug["depends_on"]),
        }

        return bug

    def filter_bugs(self, bugs):
        # We must remove bugs which have open dependencies (except meta bugs)
        # because devs may wait for those bugs to be fixed before their patch
        # can land.

        all_deps = set(dep for info in bugs.values() for dep in info["deps"])

        def bug_handler(bug, data):
            if (
                bug["status"] in {"RESOLVED", "VERIFIED", "CLOSED"}
                or "meta" in bug["keywords"]
            ):
                data.add(bug["id"])

        useless = set()
        Bugzilla(
            bugids=list(all_deps),
            include_fields=["id", "keywords", "status"],
            bughandler=bug_handler,
            bugdata=useless,
        ).get_data().wait()

        for bugid, info in bugs.items():
            # finally deps will contain open bugs which are not meta
            info["deps"] -= useless

        # keep bugs with no deps
        bugs = {bugid: info for bugid, info in bugs.items() if not info["deps"]}

        return bugs

    def check_phab(self, attachment, reviewers_phid):
        """Check if the patch in Phabricator has been r+"""
        if attachment["is_obsolete"] == 1:
            return None

        phab_url = base64.b64decode(attachment["data"]).decode("utf-8")

        # extract the revision
        rev = PHAB_URL_PAT.search(phab_url).group(1)
        try:
            data = self.phab.load_revision(
                rev_id=int(rev), queryKey="all", attachments={"reviewers": 1}
            )
        except PhabricatorRevisionNotFoundException:
            return None

        # this is a timestamp
        last_modified = data["fields"]["dateModified"]
        last_modified = lmdutils.get_date_from_timestamp(last_modified)
        if (self.date - last_modified).days <= self.nweeks * 7:
            # Do not do anything if recent changes in the bug
            return False

        reviewers = data["attachments"]["reviewers"]["reviewers"]

        if not reviewers:
            return False

        for reviewer in reviewers:
            if reviewer["status"] != "accepted":
                return False
            reviewers_phid.add(reviewer["reviewerPHID"])

        value = data["fields"]["status"].get("value", "")
        if value == "changes-planned":
            # even if the patch is r+ and not published, some changes may be required
            # so with the value 'changes-planned', the dev can say it's still a wip
            return False

        if value != "published":
            return True

        return False

    def handle_attachment(self, attachment, res):
        ct = attachment["content_type"]
        c = None
        if ct == "text/x-phabricator-request":
            if "phab" not in res or res["phab"]:
                c = self.check_phab(attachment, res["reviewers_phid"])
                if c is not None:
                    res["phab"] = c

        if c is not None:
            attacher = attachment["creator"]
            if "author" in res:
                if attacher in res["author"]:
                    res["author"][attacher] += 1
                else:
                    res["author"][attacher] = 1
            else:
                res["author"] = {attacher: 1}

            if "count" in res:
                res["count"] += 1
            else:
                res["count"] = 1

    def get_patch_data(self, bugs):
        """Get patch information in bugs"""
        nightly_pat = Bugzilla.get_landing_patterns(channels=["nightly"])[0][0]

        def comment_handler(bug, bugid, data):
            # if a comment contains a backout: don't nag
            for comment in bug["comments"]:
                comment = comment["text"].lower()
                if nightly_pat.match(comment) and (
                    "backed out" in comment or "backout" in comment
                ):
                    data[bugid]["backout"] = True

        def attachment_id_handler(attachments, bugid, data):
            for a in attachments:
                if (
                    a["content_type"] == "text/x-phabricator-request"
                    and a["is_obsolete"] == 0
                ):
                    data.append(a["id"])

        def attachment_handler(attachments, data):
            for attachment in attachments:
                bugid = str(attachment["bug_id"])
                if bugid in data:
                    data[bugid].append(attachment)
                else:
                    data[bugid] = [attachment]

        def has_blocking_dependencies(attachment):
            rev = PHAB_URL_PAT.search(
                base64.b64decode(attachment["data"]).decode("utf-8")
            ).group(1)
            try:
                revision_data = self.phab.load_revision(rev_id=int(rev))
            except PhabricatorRevisionNotFoundException:
                return None

            stack_graph = revision_data["fields"]["stackGraph"]
            current_revision_phid = revision_data["phid"]
            dependencies = stack_graph[current_revision_phid]

            for dep_phid in dependencies:
                dep_revision_data = self.phab.load_revision(rev_phid=dep_phid)
                dep_status = dep_revision_data["fields"]["status"]["value"]
                if dep_status != "published":
                    return True

            return False

        bugids = list(bugs.keys())
        data = {
            bugid: {"backout": False, "author": None, "count": 0, "dependencies": False}
            for bugid in bugids
        }

        # Get the ids of the attachments of interest
        # to avoid to download images, videos, ...
        attachment_ids = []
        Bugzilla(
            bugids=bugids,
            attachmenthandler=attachment_id_handler,
            attachmentdata=attachment_ids,
            attachment_include_fields=["is_obsolete", "content_type", "id"],
        ).get_data().wait()

        # Once we've the ids we can get the data
        attachments_by_bug = {}
        Bugzilla(
            attachmentids=attachment_ids,
            attachmenthandler=attachment_handler,
            attachmentdata=attachments_by_bug,
            attachment_include_fields=[
                "bug_id",
                "data",
                "is_obsolete",
                "content_type",
                "id",
                "creator",
            ],
        ).get_data().wait()

        for bugid, attachments in attachments_by_bug.items():
            res = {"reviewers_phid": set()}
            for attachment in attachments:
                self.handle_attachment(attachment, res)

            if "phab" in res:
                if res["phab"]:
                    data[bugid]["dependencies"] = has_blocking_dependencies(attachment)
                    data[bugid]["reviewers_phid"] = res["reviewers_phid"]
                    data[bugid]["author"] = res["author"]
                    data[bugid]["count"] = res["count"]

        data = {bugid: v for bugid, v in data.items() if v["author"]}

        if not data:
            return data

        Bugzilla(
            bugids=list(data.keys()),
            commenthandler=comment_handler,
            commentdata=data,
            comment_include_fields=["text"],
        ).get_data().wait()

        data = {
            bugid: v
            for bugid, v in data.items()
            if not v["backout"] and not v["dependencies"]
        }

        return data

    def get_bz_userid(self, phids):
        if not phids:
            return {}

        try:
            data = self.phab.load_bz_account(user_phids=list(phids))
            users = {x["phid"]: x["id"] for x in data}
        except PhabricatorBzNotFoundException:
            return {}

        def handler(user, data):
            data[str(user["id"])] = user["name"]

        data = {}
        BugzillaUser(
            user_names=list(users.values()),
            include_fields=["id", "name"],
            user_handler=handler,
            user_data=data,
        ).wait()

        return {phid: data[id] for phid, id in users.items()}

    def get_nicks(self, nicknames):
        def handler(user, data):
            data[user["name"]] = user["nick"]

        users = set(nicknames.values())
        data = {}
        if users:
            BugzillaUser(
                user_names=list(users),
                include_fields=["name", "nick"],
                user_handler=handler,
                user_data=data,
            ).wait()

        for bugid, name in nicknames.items():
            nicknames[bugid] = (name, data[name])

        return nicknames

    def get_bz_params(self, date):
        self.date = lmdutils.get_date_ymd(date)
        fields = ["flags", "depends_on"]
        params = {
            "include_fields": fields,
            "resolution": "---",
            "f1": "attachment.ispatch",
            "n2": 1,
            "f2": "attachments.isobsolete",
            "f3": "attachments.mimetype",
            "o3": "anywordssubstr",
            "v3": "text/x-phabricator-request",
            "f4": "creation_ts",
            "o4": "greaterthan",
            "v4": f"-{self.nyears}y",
            "f5": "days_elapsed",
            "o5": "greaterthaneq",
            "v5": self.nweeks * 7,
            "n6": 1,
            "f6": "longdesc",
            "o6": "casesubstring",
            "v6": "which didn't land and no activity in this bug for",
            "f7": "status_whiteboard",
            "o7": "notsubstring",
            "v7": "[reminder-test ",
        }

        return params

    def get_bugs(self, date="today", bug_ids=[]):
        bugs = super(NotLanded, self).get_bugs(date=date, bug_ids=bug_ids)
        bugs = self.filter_bugs(bugs)
        bugs_patch = self.get_patch_data(bugs)
        res = {}

        reviewers_phid = set()
        nicknames = {}
        for bugid, data in bugs_patch.items():
            reviewers_phid |= data["reviewers_phid"]
            assignee = bugs[bugid]["assigned_to"]
            if not assignee:
                assignee = max(data["author"], key=data["author"].get)
                nicknames[bugid] = assignee

        bz_reviewers = self.get_bz_userid(reviewers_phid)
        all_reviewers = set(bz_reviewers.keys())
        nicknames = self.get_nicks(nicknames)

        for bugid, data in bugs_patch.items():
            res[bugid] = d = bugs[bugid]
            self.extra_ni[bugid] = data["count"]
            assignee = d["assigned_to"]
            nickname = d["nickname"]

            if not assignee:
                assignee, nickname = nicknames[bugid]

            if not assignee:
                continue

            self.add_auto_ni(bugid, {"mail": assignee, "nickname": nickname})

            common = all_reviewers & data["reviewers_phid"]
            if common:
                reviewer = random.choice(list(common))
                self.add_auto_ni(
                    bugid, {"mail": bz_reviewers[reviewer], "nickname": None}
                )

        return res


if __name__ == "__main__":
    NotLanded().run()
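
The eight uncovered new lines are the stackGraph walk added in has_blocking_dependencies. Because that helper is nested inside get_patch_data, it cannot be imported and exercised directly; the sketch below is a standalone illustration of the same walk driven by a stubbed Phabricator client, not part of bugbot's actual test suite (FakePhab and all PHIDs/statuses are made up for the example):

# Standalone sketch of the new dependency check, run against a fake client.
class FakePhab:
    """Minimal stand-in for PhabricatorAPI.load_revision (hypothetical)."""

    def __init__(self, revisions):
        self._revisions = revisions  # revision dicts keyed by PHID

    def load_revision(self, rev_id=None, rev_phid=None):
        key = rev_phid if rev_phid is not None else f"PHID-DREV-{rev_id}"
        return self._revisions[key]


def has_blocking_dependencies(phab, revision_phid):
    """Return True if any revision this one depends on is not yet published."""
    revision = phab.load_revision(rev_phid=revision_phid)
    stack_graph = revision["fields"]["stackGraph"]
    for dep_phid in stack_graph[revision["phid"]]:
        dep = phab.load_revision(rev_phid=dep_phid)
        if dep["fields"]["status"]["value"] != "published":
            return True
    return False


fake = FakePhab(
    {
        "PHID-DREV-1": {
            "phid": "PHID-DREV-1",
            "fields": {
                "status": {"value": "accepted"},
                "stackGraph": {"PHID-DREV-1": ["PHID-DREV-2"]},
            },
        },
        "PHID-DREV-2": {
            "phid": "PHID-DREV-2",
            "fields": {
                "status": {"value": "needs-review"},
                "stackGraph": {"PHID-DREV-2": []},
            },
        },
    }
)

# D1 depends on D2, which is still in review, so the check reports a blocker.
assert has_blocking_dependencies(fake, "PHID-DREV-1") is True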