mozilla / relman-auto-nag / #4522 (push) — pending completion

Coverage reporter: coveralls-python
Committer: suhaibmujahid
Commit: [duplicate_copy_metadata] Use the new access severity field instead of whiteboard tags

641 of 3222 branches covered (19.89%)
16 of 16 new or added lines in 1 file covered (100.0%)
1821 of 8004 relevant lines covered (22.75%)
0.23 hits per line

Source File

/bugbot/rules/duplicate_copy_metadata.py — 28.69% covered
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

from typing import Any, Dict, List, Set

from libmozdata import utils as lmdutils
from libmozdata.bugzilla import Bugzilla

from bugbot import utils
from bugbot.bzcleaner import BzCleaner
from bugbot.history import History
from bugbot.webcompat_priority import WebcompatPriority

FIELD_NAME_TO_LABEL = {
    "keywords": "Keywords",
    "severity": "Severity",
    "whiteboard": "Whiteboard",
    "cf_accessibility_severity": "Accessibility Severity",
    "cf_performance_impact": "Performance Impact",
    "regressed_by": "Regressed by",
    "status": "Status",
    "cf_webcompat_priority": "Webcompat Priority",
}

FIELD_LABEL_TO_NAME = {label: name for name, label in FIELD_NAME_TO_LABEL.items()}


class DuplicateCopyMetadata(BzCleaner):
    def __init__(self, last_modification_days: int = 7):
        """Constructor

        Args:
            last_modification_days: Number of days to look back for bugs that
                were modified.
        """
        super().__init__()

        self.last_modification_date = lmdutils.get_date("today", last_modification_days)

    def description(self):
        return "Copied fields from duplicate bugs"

    def handle_bug(self, bug, data):
        # Only keep bugs that a human touched within the lookback window.
        if bug["last_change_time_non_bot"] < self.last_modification_date:
            return None

        data[str(bug["id"])] = bug

        return bug

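    # Overall flow: the superclass query (see `get_bz_params`) together with
    # `handle_bug` collects recently modified DUPLICATE bugs; `get_bugs` below
    # then fetches the open bugs they were duped to and works out, field by
    # field, what can be copied over.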
    def get_bugs(self, date="today", bug_ids=[], chunk_size=None):
        dup_bugs = super().get_bugs(date, bug_ids, chunk_size)

        original_bug_ids = {bug["dupe_of"] for bug in dup_bugs.values()}
        original_bugs = {}

        def bughandler(bug, data):
            data[str(bug["id"])] = bug

        Bugzilla(
            original_bug_ids,
            include_fields=[
                "id",
                "summary",
                "keywords",
                "duplicates",
                "cf_accessibility_severity",
                "cf_performance_impact",
                "comments",
                "history",
                "status",
                "regressed_by",
                "is_open",
                "cf_webcompat_priority",
            ],
            bughandler=bughandler,
            bugdata=original_bugs,
        ).wait()

        results = {}
        for bug_id, bug in original_bugs.items():
            if not bug["is_open"]:
                continue

            copied_fields = {}
            for dup_bug_id in bug["duplicates"]:
                dup_bug_id = str(dup_bug_id)
                dup_bug = dup_bugs.get(dup_bug_id)
                if not dup_bug:
                    continue

                # TODO: Since the logic for copied fields is getting bigger,
                # consider refactoring it in a separate method.

                # Performance Impact: copy the assessment result from duplicates
                if bug.get("cf_performance_impact") == "---" and dup_bug.get(
                    "cf_performance_impact"
                ) not in ("---", "?", None):
                    if "cf_performance_impact" not in copied_fields:
                        copied_fields["cf_performance_impact"] = {
                            "from": [dup_bug["id"]],
                            "value": dup_bug["cf_performance_impact"],
                        }
                    else:
                        copied_fields["cf_performance_impact"]["from"].append(
                            dup_bug["id"]
                        )

                # Keywords: copy the `access` keyword from duplicates
                if "access" not in bug["keywords"] and "access" in dup_bug["keywords"]:
                    if "keywords" not in copied_fields:
                        copied_fields["keywords"] = {
                            "from": [dup_bug["id"]],
                            "value": "access",
                        }
                    else:
                        copied_fields["keywords"]["from"].append(dup_bug["id"])

                # Accessibility severity: copy the rating from duplicates
                if (
                    bug.get("cf_accessibility_severity") == "---"
                    and dup_bug.get("cf_accessibility_severity") != "---"
                ):
                    new_access_severity = dup_bug["cf_accessibility_severity"]
                    assert new_access_severity in ("s1", "s2", "s3", "s4")

                    if (
                        "cf_accessibility_severity" not in copied_fields
                        or new_access_severity
                        < copied_fields["cf_accessibility_severity"]["value"]
                    ):
                        copied_fields["cf_accessibility_severity"] = {
                            "from": [dup_bug["id"]],
                            "value": new_access_severity,
                        }
                    elif (
                        new_access_severity
                        == copied_fields["cf_accessibility_severity"]["value"]
                    ):
                        copied_fields["cf_accessibility_severity"]["from"].append(
                            dup_bug["id"]
                        )

                # Webcompat Priority: copy the `cf_webcompat_priority` from duplicates
                if (
                    bug.get("cf_webcompat_priority") == "---"
                    and dup_bug.get("cf_webcompat_priority")
                    in WebcompatPriority.NOT_EMPTY_VALUES
                ):
                    new_priority = dup_bug["cf_webcompat_priority"]

                    # Since the bug does not have a priority, it does not make
                    # sense to set it to `revisit`. Instead, we set it to `?` to
                    # request triage.
                    if new_priority == "revisit":
                        new_priority = "?"

                    # `WebcompatPriority` supports ordering comparisons, so the
                    # highest priority seen across duplicates wins.
                    if (
                        "cf_webcompat_priority" not in copied_fields
                        or WebcompatPriority(new_priority)
                        > WebcompatPriority(
                            copied_fields["cf_webcompat_priority"]["value"]
                        )
                    ):
                        copied_fields["cf_webcompat_priority"] = {
                            "from": [dup_bug["id"]],
                            "value": new_priority,
                        }
                    elif (
                        new_priority == copied_fields["cf_webcompat_priority"]["value"]
                    ):
                        copied_fields["cf_webcompat_priority"]["from"].append(
                            dup_bug["id"]
                        )

                # Status: confirm the bug if the duplicate was confirmed
                if bug["status"] == "UNCONFIRMED" and self.was_confirmed(dup_bug):
                    if "status" not in copied_fields:
                        copied_fields["status"] = {
                            "from": [dup_bug["id"]],
                            "value": "NEW",
                        }
                    else:
                        copied_fields["status"]["from"].append(dup_bug["id"])

                # Regressed by: copy the `regressed_by` field from duplicates
                if dup_bug["regressed_by"]:
                    added_regressed_by = self.get_previously_added_regressors(bug)
                    new_regressed_by = {
                        regression_bug_id
                        for regression_bug_id in dup_bug["regressed_by"]
                        if regression_bug_id not in added_regressed_by
                        and regression_bug_id < int(bug_id)
                    }
                    if new_regressed_by:
                        if "regressed_by" not in copied_fields:
                            copied_fields["regressed_by"] = {
                                "from": [dup_bug["id"]],
                                "value": new_regressed_by,
                            }
                        else:
                            copied_fields["regressed_by"]["from"].append(dup_bug["id"])
                            copied_fields["regressed_by"]["value"] |= new_regressed_by

            previously_copied_fields = self.get_previously_copied_fields(bug)
            # We do not need to ignore the `regressed_by` field here because we
            # already check the history, so we will not overwrite what
            # engineers have set or removed.
            previously_copied_fields.discard("regressed_by")
            copied_fields = sorted(
                (
                    field,
                    change["value"],
                    change["from"],
                )
                for field, change in copied_fields.items()
                if field not in previously_copied_fields
            )

            if copied_fields:
                results[bug_id] = {
                    "id": bug_id,
                    "summary": bug["summary"],
                    "copied_fields": copied_fields,
                }

                self.set_autofix(bug, copied_fields)

        return results

    def set_autofix(self, bug: dict, copied_fields: List[tuple]) -> None:
        """Set the autofix for a bug

        Args:
            bug: The bug to set the autofix for.
            copied_fields: The list of copied fields with their values and the
                bugs they were copied from (field, value, source).
        """
        bug_id = str(bug["id"])
        autofix: Dict[str, Any] = {}

        duplicates = {id for _, _, source in copied_fields for id in source}

        # NOTE: modifying the following comment template should also be
        # reflected in the `get_previously_copied_fields` method.
        comment = (
            f"The following {utils.plural('field has', copied_fields, 'fields have')} been copied "
            f"from {utils.plural('a duplicate bug', duplicates, 'duplicate bugs')}:\n\n"
            "| Field | Value | Source |\n"
            "| ----- | ----- | ------ |\n"
        )
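        # Illustrative example (hypothetical bug number): with two copied
        # fields coming from a single duplicate, the rendered comment body
        # would look like:
        #
        #   The following fields have been copied from a duplicate bug:
        #
        #   | Field | Value | Source |
        #   | ----- | ----- | ------ |
        #   | Keywords | access | bug 1111111 |
        #   | Accessibility Severity | s2 | bug 1111111 |
        #
        # followed by the documentation footer appended below.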

        for field, value, source in copied_fields:
            if field == "keywords":
                autofix["keywords"] = {"add": [value]}
            elif field == "cf_accessibility_severity":
                autofix["cf_accessibility_severity"] = value
            elif field == "cf_performance_impact":
                autofix["cf_performance_impact"] = value
            elif field == "cf_webcompat_priority":
                autofix["cf_webcompat_priority"] = value
            elif field == "status":
                autofix["status"] = value
            elif field == "regressed_by":
                autofix["regressed_by"] = {"add": list(value)}
                value = utils.english_list(sorted(f"bug {id}" for id in value))
            else:
                raise ValueError(f"Unsupported field: {field}")

            field_label = FIELD_NAME_TO_LABEL[field]
            source = utils.english_list(sorted(f"bug {id}" for id in source))
            comment += f"| {field_label} | {value} | {source} |\n"

        comment += "\n\n" + self.get_documentation()
        autofix["comment"] = {"body": comment}
        # The following reduces noise by adding the `regression` keyword now
        # instead of having the bot come back later just to add it.
        if "regressed_by" in autofix and "regression" not in bug["keywords"]:
            if "keywords" not in autofix:
                autofix["keywords"] = {"add": ["regression"]}
            else:
                autofix["keywords"]["add"].append("regression")

        self.autofix_changes[bug_id] = autofix

    def get_previously_copied_fields(self, bug: dict) -> Set[str]:
        """Get the fields that have been copied from a bug's duplicates in the past.

        Args:
            bug: The bug to get the previously copied fields for.

        Returns:
            A set of previously copied fields.
        """
        previously_copied_fields = set()

        for comment in bug["comments"]:
            if comment["author"] != History.BOT or not comment["text"].startswith(
                "The following field"
            ):
                continue

            lines = comment["text"].splitlines()
            try:
                table_first_line = lines.index("| Field | Value | Source |")
            except ValueError:
                continue

            for line in lines[table_first_line + 2 :]:
                if not line.startswith("|"):
                    break
                field_label = line.split("|")[1].strip()
                field_name = FIELD_LABEL_TO_NAME[field_label]
                previously_copied_fields.add(field_name)

        return previously_copied_fields

    def get_previously_added_regressors(self, bug: dict) -> Set[int]:
        """Get the bug ids for regressors that have been added to a bug in the
        past.

        Args:
            bug: The bug to get the previously added regressors for.

        Returns:
            A set of ids for previously added regressors.
        """
        added_regressors = {
            int(bug_id)
            for entry in bug["history"]
            for change in entry["changes"]
            if change["field_name"] == "regressed_by"
            for bug_id in change["removed"].split(",")
            if bug_id
        }
        added_regressors.update(bug["regressed_by"])

        return added_regressors

    def was_confirmed(self, bug: dict) -> bool:
        """Check if the bug was confirmed."""

        for entry in reversed(bug["history"]):
            for change in entry["changes"]:
                if change["field_name"] != "status":
                    continue

                if change["removed"] in (
                    "REOPENED",
                    "CLOSED",
                    "RESOLVED",
                ):
                    break

                return change["removed"] != "UNCONFIRMED"

        return False

    def columns(self):
        return ["id", "summary", "copied_fields"]

    def get_bz_params(self, date):
        fields = [
            "history",
            "keywords",
            "cf_accessibility_severity",
            "cf_performance_impact",
            "dupe_of",
            "regressed_by",
            "cf_webcompat_priority",
            "last_change_time_non_bot",
        ]

        # Target bugs resolved as DUPLICATE where any of the relevant fields
        # changed since the last modification date.
        params = {
            "include_fields": fields,
            "resolution": "DUPLICATE",
            "chfieldfrom": self.last_modification_date,
            "chfield": [
                "resolution",
                "keywords",
                "status_whiteboard",
                "cf_accessibility_severity",
                "cf_performance_impact",
                "regressed_by",
                "cf_webcompat_priority",
            ],
        }

        return params


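# This rule is typically executed by bugbot's scheduled runner; the entry point
# below also allows running it directly, e.g.
# `python -m bugbot.rules.duplicate_copy_metadata` (assuming the standard
# package layout).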
if __name__ == "__main__":
    DuplicateCopyMetadata().run()