• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

mozilla / relman-auto-nag / #4880

30 Dec 2023 05:52PM CUT coverage: 21.899%. Remained the same
#4880

push

coveralls-python

web-flow
Small improvements on the lot/several rules (#2313)

716 of 3590 branches covered (0.0%)

1928 of 8804 relevant lines covered (21.9%)

0.22 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

0.0
/bugbot/crash/analyzer.py
1
# This Source Code Form is subject to the terms of the Mozilla Public
2
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
3
# You can obtain one at http://mozilla.org/MPL/2.0/.
4

5
import itertools
×
6
import re
×
7
from collections import defaultdict
×
8
from datetime import date, datetime, timedelta
×
9
from functools import cached_property
×
10
from typing import Iterable, Iterator
×
11

12
from dateutil import parser
×
13
from libmozdata import bugzilla, clouseau, connection, socorro
×
14
from libmozdata import utils as lmdutils
×
15
from libmozdata.bugzilla import Bugzilla
×
16
from libmozdata.connection import Connection
×
17

18
from bugbot import logger, utils
×
19
from bugbot.bug.analyzer import BugAnalyzer, BugsStore
×
20
from bugbot.components import ComponentName
×
21
from bugbot.crash import socorro_util
×
22

23
# The max offset from a memory address to be considered "near".
OFFSET_64_BIT = 0x1000
OFFSET_32_BIT = 0x100
# Allocator poison value addresses.
ALLOCATOR_ADDRESSES_64_BIT = (
    (0xE5E5E5E5E5E5E5E5, OFFSET_64_BIT),
    # On 64-bit windows, sometimes it could be doing something with a 32-bit
    # value gotten from freed memory, so it'll be 0X00000000E5E5E5E5 +/-, and
    # because of the address limitation, quite often it will be
    # 0X0000E5E5E5E5E5E5 +/-.
    (0x00000000E5E5E5E5, OFFSET_32_BIT),
    (0x0000E5E5E5E5E5E5, OFFSET_64_BIT),
    (0x4B4B4B4B4B4B4B4B, OFFSET_64_BIT),
)
ALLOCATOR_ADDRESSES_32_BIT = (
    (0xE5E5E5E5, OFFSET_32_BIT),
    (0x4B4B4B4B, OFFSET_32_BIT),
)
# Ranges where addresses are considered near allocator poison values.
ALLOCATOR_RANGES_64_BIT = tuple(
    (addr - offset, addr + offset) for addr, offset in ALLOCATOR_ADDRESSES_64_BIT
)
ALLOCATOR_RANGES_32_BIT = tuple(
    (addr - offset, addr + offset) for addr, offset in ALLOCATOR_ADDRESSES_32_BIT
)


def is_near_null_address(str_address) -> bool:
    """Check whether a crash address is near null.

    Args:
        str_address: The memory address to check, as a string accepted by
            `int(..., 0)` (e.g. "0x1f").

    Returns:
        True if the address is near null, False otherwise.
    """
    address = int(str_address, 0)
    # Heuristic: "0x" plus 16 hex digits means a 64-bit address
    # (len >= 18); shorter strings are treated as 32-bit.
    near_offset = OFFSET_64_BIT if len(str_address) >= 18 else OFFSET_32_BIT
    return -near_offset <= address <= near_offset


def is_near_allocator_address(str_address) -> bool:
    """Check whether a crash address is near an allocator poison value.

    Args:
        str_address: The memory address to check, as a string accepted by
            `int(..., 0)`.

    Returns:
        True if the address is near an allocator poison value, False
        otherwise.
    """
    address = int(str_address, 0)
    # Same 64-bit vs 32-bit heuristic as `is_near_null_address`.
    poison_ranges = (
        ALLOCATOR_RANGES_64_BIT if len(str_address) >= 18 else ALLOCATOR_RANGES_32_BIT
    )
    return any(low <= address <= high for low, high in poison_ranges)
86

87

88
# TODO: Move this to libmozdata
def generate_signature_page_url(params: dict, tab: str) -> str:
    """Build the URL of a signature page on Socorro.

    Args:
        params: the parameters for the search query.
        tab: the page tab that should be selected.

    Returns:
        The URL of the signature page on Socorro
    """
    return "{}/signature/{}#{}".format(
        socorro.Socorro.CRASH_STATS_URL,
        lmdutils.get_params_for_url(params),
        tab,
    )
102

103

104
class NoCrashReportFoundError(Exception):
    """There are no crash reports that meet the required criteria.

    Raised by `SignatureAnalyzer.fetch_representative_processed_crash` when no
    usable report can be found for a signature.
    """
106

107

108
class ClouseauDataAnalyzer:
    """Analyze the data returned by Crash Clouseau about a specific crash
    signature.
    """

    # Reports/changesets scoring below this Clouseau score are ignored when
    # identifying potential regressors.
    MINIMUM_CLOUSEAU_SCORE_THRESHOLD: int = 8
    # Fallback component when the potential regressor bugs do not agree on a
    # single component.
    DEFAULT_CRASH_COMPONENT = ComponentName("Core", "General")

    def __init__(
        self, reports: Iterable[dict], bugs_store: BugsStore, first_crash_date: datetime
    ):
        """Initialize the analyzer.

        Args:
            reports: Crash Clouseau reports for the signature. Each report is
                expected to carry a "max_score" and a "changesets" list (see
                `regressed_by_potential_patches` for an example entry).
            bugs_store: store used to fetch and look up Bugzilla bugs.
            first_crash_date: when the signature first crashed; patches pushed
                on or after this date are discarded as potential regressors.
        """
        self._clouseau_reports = reports
        self._first_crash_date = first_crash_date
        self.bugs_store = bugs_store

    @cached_property
    def max_clouseau_score(self):
        """The maximum Clouseau score in the crash reports."""
        if not self._clouseau_reports:
            return 0
        return max(report["max_score"] for report in self._clouseau_reports)

    @cached_property
    def regressed_by_potential_bug_ids(self) -> set[int]:
        """The IDs for the bugs that their patches could have caused the crash."""
        return {
            changeset["bug_id"] for changeset in self.regressed_by_potential_patches
        }

    @cached_property
    def regressed_by_patch(self) -> str | None:
        """The hash of the patch that could have caused the crash.

        If the candidates do not narrow down to exactly one changeset, the
        value will be `None`.
        """
        potential_patches = {
            changeset["changeset"] for changeset in self.regressed_by_potential_patches
        }
        if len(potential_patches) == 1:
            return next(iter(potential_patches))
        return None

    @cached_property
    def regressed_by(self) -> int | None:
        """The ID of the bug that one of its patches could have caused
        the crash.

        If there are multiple bugs, the value will be `None`.
        """
        bug_ids = self.regressed_by_potential_bug_ids
        if len(bug_ids) == 1:
            return next(iter(bug_ids))
        return None

    @cached_property
    def regressed_by_potential_bugs(self) -> list[BugAnalyzer]:
        """The bugs whose patches could have caused the crash."""
        # Fetch the candidate bugs into the store first so the lookups below
        # can be served from it.
        self.bugs_store.fetch_bugs(
            self.regressed_by_potential_bug_ids,
            [
                "id",
                "groups",
                "assigned_to",
                "product",
                "component",
                "_custom",
            ],
        )
        return [
            self.bugs_store.get_bug_by_id(bug_id)
            for bug_id in self.regressed_by_potential_bug_ids
        ]

    @cached_property
    def regressed_by_author(self) -> dict | None:
        """The author of the patch that could have caused the crash.

        If there are multiple regressors, the value will be `None`.

        The regressor bug assignee is considered as the author, even if the
        assignee is not the patch author.
        """

        if not self.regressed_by:
            return None

        # `regressed_by` is only non-None when there is exactly one potential
        # bug, so the single list element must be that bug.
        bug = self.regressed_by_potential_bugs[0]
        assert bug.id == self.regressed_by
        return bug.get_field("assigned_to_detail")

    @cached_property
    def crash_component(self) -> ComponentName:
        """The component that the crash belongs to.

        If there are multiple components, the value will be the default one.
        """
        potential_components = {
            bug.component for bug in self.regressed_by_potential_bugs
        }
        if len(potential_components) == 1:
            return next(iter(potential_components))
        return self.DEFAULT_CRASH_COMPONENT

    @property
    def regressed_by_potential_patches(self) -> Iterator[dict]:
        """The patches that could have caused the crash.

        This is a plain property (not cached) returning a fresh generator on
        each access, so the cached properties above can each iterate it
        independently.

        Example of a patch data:
            {
                "bug_id": 1668136,
                "changeset": "aa66fda02aac",
                "channel": "nightly",
                "is_backedout": False,
                "is_merge": False,
                "max_score": 0,
                "push_date": "Tue, 31 Oct 2023 09:30:58 GMT",
            }
        """
        # Accept only the strongest evidence: never below the fixed threshold,
        # and never below the best score seen across all reports.
        minimum_accepted_score = max(
            self.MINIMUM_CLOUSEAU_SCORE_THRESHOLD, self.max_clouseau_score
        )
        return (
            changeset
            for report in self._clouseau_reports
            if report["max_score"] >= minimum_accepted_score
            for changeset in report["changesets"]
            if changeset["max_score"] >= minimum_accepted_score
            and not changeset["is_merge"]
            and not changeset["is_backedout"]
            # NOTE(marco): This aims to reduce noise but could exclude valid
            # regressors, such as when a single signature refers to multiple
            # crash causes.
            and self._first_crash_date > parser.parse(changeset["push_date"])
        )
239

240

241
class SocorroDataAnalyzer(socorro_util.SignatureStats):
    """Analyze the data returned by Socorro."""

    # Class-level caches of Bugzilla's legal field values; populated lazily on
    # first use and shared across all instances (one Bugzilla fetch each).
    _bugzilla_os_legal_values = None
    _bugzilla_cpu_legal_values_map = None
    # Platform descriptors passed to SignatureStats (Socorro short name to
    # display name).
    _platforms = [
        {"short_name": "win", "name": "Windows"},
        {"short_name": "mac", "name": "Mac OS X"},
        {"short_name": "lin", "name": "Linux"},
        {"short_name": "and", "name": "Android"},
        {"short_name": "unknown", "name": "Unknown"},
    ]

    def __init__(
        self,
        signature: dict,
        num_total_crashes: int,
    ):
        """Initialize the analyzer.

        Args:
            signature: the Socorro signature data, including its "facets".
            num_total_crashes: the total number of crashes in the query range.
        """
        super().__init__(signature, num_total_crashes, platforms=self._platforms)

    @classmethod
    def to_bugzilla_op_sys(cls, op_sys: str) -> str:
        """Return the corresponding OS name in Bugzilla for the provided OS name
        from Socorro.

        If the OS name is not recognized, return "Other".
        """
        if cls._bugzilla_os_legal_values is None:
            cls._bugzilla_os_legal_values = set(
                bugzilla.BugFields.fetch_field_values("op_sys")
            )

        # Already a legal Bugzilla value: keep it as-is.
        if op_sys in cls._bugzilla_os_legal_values:
            return op_sys

        # Otherwise collapse to a root OS family name.
        if op_sys.startswith("OS X ") or op_sys.startswith("macOS "):
            op_sys = "macOS"
        elif op_sys.startswith("Windows"):
            op_sys = "Windows"
        elif "Linux" in op_sys or op_sys.startswith("Ubuntu"):
            op_sys = "Linux"
        else:
            op_sys = "Other"

        return op_sys

    @cached_property
    def first_crash_date(self) -> datetime:
        """The date of the first crash within the query time range."""
        # The histogram_date facet is assumed to be sorted ascending, so the
        # first entry is the earliest date — TODO confirm against Socorro.
        return parser.parse(self.signature["facets"]["histogram_date"][0]["term"])

    @property
    def first_crash_date_ymd(self) -> str:
        """The date of the first crash within the query time range.

        The date is in YYYY-MM-DD format.
        """
        return self.first_crash_date.strftime("%Y-%m-%d")

    @property
    def bugzilla_op_sys(self) -> str:
        """The name of the OS where the crash happens.

        The value is one of the legal values for Bugzilla's `op_sys` field.

        - If no OS name is found, the value will be "Unspecified".
        - If the OS name is not recognized, the value will be "Other".
        - If multiple OS names are found, the value will be "All". Unless the OS
          names can be resolved to a common name without a version. For example,
          "Windows 10" and "Windows 7" will become "Windows".
        """
        all_op_sys = {
            self.to_bugzilla_op_sys(op_sys["term"])
            for op_sys in self.signature["facets"]["platform_pretty_version"]
        }

        if len(all_op_sys) > 1:
            # Resolve to root OS name by removing the version number.
            all_op_sys = {op_sys.split(" ")[0] for op_sys in all_op_sys}

        if len(all_op_sys) == 2 and "Other" in all_op_sys:
            # TODO: explain this workaround.
            # NOTE(review): presumably dropping "Other" lets a single
            # recognized OS win over unrecognized names — confirm intent.
            all_op_sys.remove("Other")

        if len(all_op_sys) == 1:
            return next(iter(all_op_sys))

        if len(all_op_sys) == 0:
            return "Unspecified"

        return "All"

    @classmethod
    def to_bugzilla_cpu(cls, cpu: str) -> str:
        """Return the corresponding CPU name in Bugzilla for the provided name
        from Socorro.

        If the CPU is not recognized, return "Other".
        """
        if cls._bugzilla_cpu_legal_values_map is None:
            # Keyed by lower-cased value so Socorro's casing doesn't matter.
            cls._bugzilla_cpu_legal_values_map = {
                value.lower(): value
                for value in bugzilla.BugFields.fetch_field_values("rep_platform")
            }

        return cls._bugzilla_cpu_legal_values_map.get(cpu, "Other")

    @property
    def bugzilla_cpu_arch(self) -> str:
        """The CPU architecture of the devices where the crash happens.

        The value is one of the legal values for Bugzilla's `rep_platform` field.

        - If no CPU architecture is found, the value will be "Unspecified".
        - If the CPU architecture is not recognized, the value will be "Other".
        - If multiple CPU architectures are found, the value will "All".
        """
        all_cpu_arch = {
            self.to_bugzilla_cpu(cpu["term"])
            for cpu in self.signature["facets"]["cpu_arch"]
        }

        # Same workaround as `bugzilla_op_sys`: an unrecognized name does not
        # dilute a single recognized architecture.
        if len(all_cpu_arch) == 2 and "Other" in all_cpu_arch:
            all_cpu_arch.remove("Other")

        if len(all_cpu_arch) == 1:
            return next(iter(all_cpu_arch))

        if len(all_cpu_arch) == 0:
            return "Unspecified"

        return "All"

    @property
    def user_comments_page_url(self) -> str:
        """The URL to the Signature page on Socorro where the Comments tab is
        selected.
        """
        # Look back half a year (26 weeks) for user comments.
        start_date = date.today() - timedelta(weeks=26)
        params = {
            "signature": self.signature_term,
            "date": socorro.SuperSearch.get_search_date(start_date),
        }
        return generate_signature_page_url(params, "comments")

    @property
    def num_user_comments(self) -> int:
        """The number of crash reports with user comments."""
        # TODO: count useful/interesting user comments (e.g., exclude one word comments)
        return self.signature["facets"]["cardinality_user_comments"]["value"]

    @property
    def has_user_comments(self) -> bool:
        """Whether the crash signature has any reports with a user comment."""
        return self.num_user_comments > 0

    @property
    def top_proto_signature(self) -> str:
        """The proto signature that occurs the most."""
        return self.signature["facets"]["proto_signature"][0]["term"]

    @property
    def num_top_proto_signature_crashes(self) -> int:
        """The number of crashes for the most occurring proto signature."""
        return self.signature["facets"]["proto_signature"][0]["count"]

    def _build_ids(self) -> Iterator[int]:
        """Yields the build IDs where the crash occurred."""
        for build_id in self.signature["facets"]["build_id"]:
            yield build_id["term"]

    @property
    def top_build_id(self) -> int:
        """The build ID where most crashes occurred."""
        return self.signature["facets"]["build_id"][0]["term"]

    @cached_property
    def num_near_null_crashes(self) -> int:
        """The number of crashes that occurred on addresses near null."""
        return sum(
            address["count"]
            for address in self.signature["facets"]["address"]
            if is_near_null_address(address["term"])
        )

    @property
    def is_near_null_crash(self) -> bool:
        """Whether all crashes occurred on addresses near null."""
        return self.num_near_null_crashes == self.num_crashes

    @property
    def is_potential_near_null_crash(self) -> bool:
        """Whether the signature is a potential near null crash.

        The value will be True if some but not all crashes occurred on addresses
        near null.
        """
        return not self.is_near_null_crash and self.num_near_null_crashes > 0

    @property
    def is_near_null_related_crash(self) -> bool:
        """Whether the signature is related to near null crashes.

        The value will be True if any of the crashes occurred on addresses near
        null.
        """
        return self.is_near_null_crash or self.is_potential_near_null_crash

    @cached_property
    def num_near_allocator_crashes(self) -> int:
        """The number of crashes that occurred on addresses near an allocator
        poison value.
        """
        return sum(
            address["count"]
            for address in self.signature["facets"]["address"]
            if is_near_allocator_address(address["term"])
        )

    @property
    def is_near_allocator_crash(self) -> bool:
        """Whether all crashes occurred on addresses near an allocator poison
        value.
        """
        return self.num_near_allocator_crashes == self.num_crashes

    @property
    def is_potential_near_allocator_crash(self) -> bool:
        """Whether the signature is a potential near allocator poison value
        crash.

        The value will be True if some but not all crashes occurred on addresses
        near an allocator poison value.
        """
        return not self.is_near_allocator_crash and self.num_near_allocator_crashes > 0

    @property
    def is_near_allocator_related_crash(self) -> bool:
        """Whether the signature is related to near allocator poison value
        crashes.

        The value will be True if any of the crashes occurred on addresses near
        an allocator poison value.
        """
        return self.is_near_allocator_crash or self.is_potential_near_allocator_crash
×
486

487

488
class SignatureAnalyzer(SocorroDataAnalyzer, ClouseauDataAnalyzer):
    """Analyze the data related to a signature.

    This includes data from Socorro and Clouseau.
    """

    def __init__(
        self,
        socorro_signature: dict,
        num_total_crashes: int,
        clouseau_reports: list[dict],
        bugs_store: BugsStore,
    ):
        """Initialize the analyzer.

        Args:
            socorro_signature: the Socorro signature data (with its "facets").
            num_total_crashes: the total number of crashes in the query range.
            clouseau_reports: Crash Clouseau reports for the signature.
            bugs_store: store used to fetch and look up Bugzilla bugs.
        """
        # Socorro must be initialized first: the Clouseau analyzer needs
        # `first_crash_date`, which is derived from the Socorro facets.
        SocorroDataAnalyzer.__init__(self, socorro_signature, num_total_crashes)
        ClouseauDataAnalyzer.__init__(
            self, clouseau_reports, bugs_store, self.first_crash_date
        )

    def _fetch_crash_reports(
        self,
        proto_signature: str,
        build_id: int | Iterable[int],
        limit: int = 1,
    ) -> Iterator[dict]:
        """Fetch crash report hits from Socorro SuperSearch.

        Args:
            proto_signature: the proto signature to match exactly.
            build_id: a build ID, or an iterable of build IDs, to search in.
            limit: the maximum number of results to request.

        Yields:
            Search hits; each hit is a dict carrying a "uuid".
        """
        params = {
            # The "=" operator requests an exact match on SuperSearch.
            "proto_signature": "=" + proto_signature,
            "build_id": build_id,
            "_columns": [
                "uuid",
            ],
            "_results_number": limit,
        }

        def handler(res: dict, data: dict):
            data.update(res)

        data: dict = {}
        # Blocks until the SuperSearch request completes.
        socorro.SuperSearch(params=params, handler=handler, handlerdata=data).wait()

        yield from data["hits"]

    def _is_corrupted_crash_stack(self, processed_crash: dict) -> bool:
        """Whether the crash stack is corrupted.

        Args:
            processed_crash: The processed crash to check.

        Returns:
            True if the crash stack is corrupted, False otherwise.
        """

        # A frame with no module is taken as a sign of a corrupted stack.
        return any(
            not frame["module"]
            for frame in processed_crash["json_dump"]["crashing_thread"]["frames"]
        )

    def fetch_representative_processed_crash(self) -> dict:
        """Fetch a processed crash to represent the signature.

        This could fetch multiple processed crashes and return the one that is
        most likely to be useful.

        Raises:
            NoCrashReportFoundError: if no candidate report matches the
                required proto signature.
        """
        # Only restrict to the top proto signature when it dominates the
        # signature's crashes (> 60%).
        limit_to_top_proto_signature = (
            self.num_top_proto_signature_crashes / self.num_crashes > 0.6
        )

        candidate_reports = itertools.chain(
            # Reports with a higher score from clouseau are more likely to be
            # useful.
            sorted(
                self._clouseau_reports,
                key=lambda report: report["max_score"],
                reverse=True,
            ),
            # Next we try find reports from the top crashing build because they
            # are likely to be representative.
            self._fetch_crash_reports(self.top_proto_signature, self.top_build_id),
            self._fetch_crash_reports(self.top_proto_signature, self._build_ids()),
        )

        first_representative_report = None
        for i, report in enumerate(candidate_reports):
            uuid = report["uuid"]
            processed_crash = socorro.ProcessedCrash.get_processed(uuid)[uuid]
            if (
                limit_to_top_proto_signature
                and processed_crash["proto_signature"] != self.top_proto_signature
            ):
                continue

            # Remember the first acceptable report as a fallback, even if its
            # stack later turns out to be corrupted.
            if first_representative_report is None:
                first_representative_report = processed_crash

            if not self._is_corrupted_crash_stack(processed_crash):
                return processed_crash

            if i >= 20:
                # We have tried enough reports, give up.
                break

        if first_representative_report is not None:
            # Fall back to the first representative report that we found, even
            # if it's corrupted.
            return first_representative_report

        raise NoCrashReportFoundError(
            f"No crash report found with the most frequent proto signature for {self.signature_term}."
        )

    @cached_property
    def is_potential_security_crash(self) -> bool:
        """Whether the crash is related to a potential security bug.

        The value will be True if:
            - the signature is related to near allocator poison value crashes, or
            - one of the potential regressors is a security bug
        """
        return self.is_near_allocator_related_crash or any(
            bug.is_security for bug in self.regressed_by_potential_bugs
        )

    def has_moz_crash_reason(self, reason: str) -> bool:
        """Whether the crash has a specific MOZ_CRASH reason.

        Args:
            reason: The MOZ_CRASH reason to check.

        Returns:
            True if the any of the MOZ_CRASH reasons has a partial match with
            the provided reason.
        """
        return any(
            reason in moz_crash_reason["term"]
            for moz_crash_reason in self.signature["facets"]["moz_crash_reason"]
        )

    @property
    def process_type_summary(self) -> str:
        """The summary of the process types for the crash signature."""
        process_types = self.signature["facets"]["process_type"]
        if len(process_types) == 0:
            return "Unknown"

        if len(process_types) == 1:
            process_type = process_types[0]["term"]
            # Small process types are usually acronyms (e.g., gpu for GPU), thus
            # we use upper case for them. Otherwise, we capitalize the first letter.
            if len(process_type) <= 3:
                return process_type.upper()
            return process_type.capitalize()

        return "Multiple distinct types"
×
640

641

642
class SignaturesDataFetcher:
    """Fetch the data related to the given signatures."""

    # Crash reasons that correspond to memory access errors. A low-volume
    # signature showing more than one distinct reason from this list is
    # treated as a potential bad-hardware crash and skipped.
    MEMORY_ACCESS_ERROR_REASONS = (
        # On Windows:
        "EXCEPTION_ACCESS_VIOLATION_READ",
        "EXCEPTION_ACCESS_VIOLATION_WRITE",
        # NOTE: a comma was missing after the following entry, which silently
        # concatenated it with "SIGSEGV / SEGV_MAPERR" into one bogus string,
        # so neither reason could ever match.
        "EXCEPTION_ACCESS_VIOLATION_EXEC",
        # On Linux:
        "SIGSEGV / SEGV_MAPERR",
        "SIGSEGV / SEGV_ACCERR",
    )

    # Signatures whose MOZ_CRASH reason contains any of these strings are not
    # actionable (OOM/shutdown noise) and should be excluded.
    EXCLUDED_MOZ_REASON_STRINGS = (
        "MOZ_CRASH(OOM)",
        "MOZ_CRASH(Out of memory)",
        "out of memory",
        "Shutdown hanging",
        # TODO(investigate): do we need to exclude signatures that their reason
        # contains `[unhandlable oom]`?
        # Example: arena_t::InitChunk | arena_t::AllocRun | arena_t::MallocLarge | arena_t::Malloc | BaseAllocator::malloc | Allocator::malloc | PageMalloc
        # "[unhandlable oom]",
    )

    # If any of the crash reason starts with any of the following, then it is
    # Network or I/O error.
    EXCLUDED_IO_ERROR_REASON_PREFIXES = (
        "EXCEPTION_IN_PAGE_ERROR_READ",
        "EXCEPTION_IN_PAGE_ERROR_WRITE",
        "EXCEPTION_IN_PAGE_ERROR_EXEC",
    )

    # TODO(investigate): do we need to exclude all these signatures prefixes?
    EXCLUDED_SIGNATURE_PREFIXES = (
        "OOM | ",
        "bad hardware | ",
        "shutdownhang | ",
    )

    # Time window used when summarizing a signature's crash data from Socorro.
    SUMMARY_DURATION = timedelta(weeks=10)
×
682

683
    def __init__(
        self,
        signatures: Iterable[str],
        product: str = "Firefox",
        channel: str = "nightly",
    ):
        """Initialize the fetcher.

        Args:
            signatures: The crash signatures to fetch data for.
            product: The product to query.
            channel: The release channel to query.
        """
        # De-duplicate up front; all later uses are set operations and
        # membership tests.
        self._signatures = set(signatures)
        self._product = product
        self._channel = channel
×
692

693
    @classmethod
    def find_new_actionable_crashes(
        cls,
        product: str,
        channel: str,
        days_to_check: int = 7,
        days_without_crashes: int = 7,
    ) -> "SignaturesDataFetcher":
        """Find new actionable crashes.

        A signature is "new" when it first appeared within the last
        `days_to_check` days and had no crashes in the `days_without_crashes`
        days before that. Signatures matching known non-actionable patterns
        (OOM, shutdown hangs, I/O errors, likely bad hardware) are filtered
        out.

        Args:
            product: The product to check.
            channel: The release channel to check.
            days_to_check: The number of days to check for crashes.
            days_without_crashes: The number of days without crashes before the
                `days_to_check` to consider the signature new.

        Returns:
            A `SignaturesDataFetcher` wrapping the new actionable signatures.
        """
        # The query covers both windows; newness is decided locally by
        # comparing each signature's first crash date to this cutoff.
        duration = days_to_check + days_without_crashes
        end_date = lmdutils.get_date_ymd("today")
        # timedelta(duration) counts days (timedelta's first positional arg).
        start_date = end_date - timedelta(duration)
        earliest_allowed_date = lmdutils.get_date_str(
            end_date - timedelta(days_to_check)
        )
        date_range = socorro.SuperSearch.get_search_date(start_date, end_date)

        params = {
            "product": product,
            "release_channel": channel,
            "date": date_range,
            # TODO(investigate): should we do a local filter instead of the
            # following (should we exclude the signature if one of the crashes
            # is a shutdown hang?):
            # If the `ipc_shutdown_state` or `shutdown_progress` field are
            # non-empty then it's a shutdown hang.
            "ipc_shutdown_state": "__null__",
            "shutdown_progress": "__null__",
            # TODO(investigate): should we use the following instead of the
            # local filter.
            # "oom_allocation_size": "!__null__",
            "_aggs.signature": [
                "moz_crash_reason",
                "reason",
                "possible_bit_flips_max_confidence",
                "_histogram.date",
                "_cardinality.install_time",
                "_cardinality.oom_allocation_size",
            ],
            # No individual results needed, only the per-signature facets.
            "_results_number": 0,
            "_facets_size": 10000,
        }

        def handler(search_resp: dict, data: list):
            # Applies the local filtering criteria and appends the surviving
            # signature terms to `data`.
            logger.debug(
                "Total of %d signatures received from Socorro",
                len(search_resp["facets"]["signature"]),
            )

            for crash in search_resp["facets"]["signature"]:
                signature = crash["term"]
                if any(
                    signature.startswith(excluded_prefix)
                    for excluded_prefix in cls.EXCLUDED_SIGNATURE_PREFIXES
                ):
                    # Ignore signatures that start with any of the excluded prefixes.
                    continue

                facets = crash["facets"]
                installations = facets["cardinality_install_time"]["value"]
                if installations <= 1:
                    # Ignore crashes that only happen on one installation.
                    continue

                # NOTE(review): assumes the date histogram buckets are in
                # ascending order, so [0] is the earliest crash date — confirm
                # against the SuperSearch API.
                first_date = facets["histogram_date"][0]["term"]
                if first_date < earliest_allowed_date:
                    # The crash is not new, skip it.
                    continue

                if any(
                    reason["term"].startswith(io_error_prefix)
                    for reason in facets["reason"]
                    for io_error_prefix in cls.EXCLUDED_IO_ERROR_REASON_PREFIXES
                ):
                    # Ignore Network or I/O error crashes.
                    continue

                if crash["count"] < 20:
                    # For signatures with low volume, having multiple types of
                    # memory errors indicates potential bad hardware crashes.
                    num_memory_error_types = sum(
                        reason["term"] in cls.MEMORY_ACCESS_ERROR_REASONS
                        for reason in facets["reason"]
                    )
                    if num_memory_error_types > 1:
                        # Potential bad hardware crash, skip it.
                        continue

                # Fraction of crashes flagged with any bit-flip confidence.
                bit_flips_count = sum(
                    row["count"] for row in facets["possible_bit_flips_max_confidence"]
                )
                bit_flips_percentage = bit_flips_count / crash["count"]
                if bit_flips_percentage >= 0.2:
                    # Potential bad hardware crash, skip it.
                    continue

                # TODO(investigate): is this needed since we are already
                # filtering signatures that start with "OOM | "
                if facets["cardinality_oom_allocation_size"]["value"]:
                    # If one of the crashes is an OOM crash, skip it.
                    continue

                # TODO(investigate): do we need to check for the `moz_crash_reason`
                moz_crash_reasons = facets["moz_crash_reason"]
                if moz_crash_reasons and any(
                    excluded_reason in reason["term"]
                    for reason in moz_crash_reasons
                    for excluded_reason in cls.EXCLUDED_MOZ_REASON_STRINGS
                ):
                    continue

                data.append(signature)

        signatures: list = []
        socorro.SuperSearch(
            params=params,
            handler=handler,
            handlerdata=signatures,
        ).wait()

        logger.debug(
            "Total of %d signatures left after applying the filtering criteria",
            len(signatures),
        )

        return cls(signatures, product, channel)
×
830

831
    def fetch_clouseau_crash_reports(self) -> dict[str, list]:
        """Fetch the crash reports data from Crash Clouseau.

        Returns:
            A mapping from signature to its Clouseau crash reports. Signatures
            that Clouseau has not analyzed are absent from the result.
        """
        if not self._signatures:
            return {}

        logger.debug(
            "Fetch from Clouseau: requesting reports for %d signatures",
            len(self._signatures),
        )

        reports_by_signature = clouseau.Reports.get_by_signatures(
            self._signatures,
            product=self._product,
            channel=self._channel,
        )

        logger.debug(
            "Fetch from Clouseau: received reports for %d signatures",
            len(reports_by_signature),
        )

        return reports_by_signature
×
853

854
    def fetch_socorro_info(self) -> tuple[list[dict], int]:
        """Fetch the signature data from Socorro.

        Returns:
            A tuple of the per-signature facet data and the total number of
            crashes across all signatures in the queried window.
        """
        if not self._signatures:
            return [], 0

        end_date = lmdutils.get_date_ymd("today")
        date_range = socorro.SuperSearch.get_search_date(
            end_date - self.SUMMARY_DURATION, end_date
        )

        params = {
            "product": self._product,
            # TODO(investigate): should we included all release channels?
            "release_channel": self._channel,
            # TODO(investigate): should we limit based on the build date as well?
            "date": date_range,
            # TODO: split signatures into chunks to avoid very long query URLs
            "signature": ["=" + signature for signature in self._signatures],
            "_aggs.signature": [
                "address",
                "build_id",
                "cpu_arch",
                "proto_signature",
                "_cardinality.user_comments",
                "cpu_arch",
                "platform_pretty_version",
                "_histogram.date",
                # The following are needed for SignatureStats:
                "platform",
                "is_garbage_collecting",
                "_cardinality.install_time",
                "startup_crash",
                "_histogram.uptime",
                "process_type",
                "moz_crash_reason",
            ],
            "_results_number": 0,
            "_facets_size": 10000,
        }

        def handler(search_results: dict, data: dict):
            # Stash the pieces we need; the connection machinery owns the loop.
            data["num_total_crashes"] = search_results["total"]
            data["signatures"] = search_results["facets"]["signature"]

        logger.debug(
            "Fetch from Socorro: requesting info for %d signatures",
            len(self._signatures),
        )

        result: dict = {}
        socorro.SuperSearchUnredacted(
            params=params,
            handler=handler,
            handlerdata=result,
        ).wait()

        logger.debug(
            "Fetch from Socorro: received info for %d signatures",
            len(result["signatures"]),
        )

        return result["signatures"], result["num_total_crashes"]
×
915

916
    def fetch_bugs(
        self, include_fields: list[str] | None = None
    ) -> dict[str, list[dict]]:
        """Fetch bugs that are filed against the given signatures.

        Args:
            include_fields: Extra bug fields to request from Bugzilla, in
                addition to `cf_crash_signature` (always requested so results
                can be mapped back to signatures).

        Returns:
            A mapping from signature to the list of bugs whose crash signature
            field contains it.
        """
        if not self._signatures:
            return {}

        params_base: dict = {
            "include_fields": [
                "cf_crash_signature",
            ],
        }

        if include_fields:
            params_base["include_fields"].extend(include_fields)

        # Build one query per chunk of 30 signatures to keep URLs short. Each
        # query uses Bugzilla's custom-search fields: f{n}/o{n}/v{n} are the
        # field/operator/value triples, and j{n} joins them; "OP"/"CP" open
        # and close a group, so the chunk becomes one OR-ed group of regexp
        # matches on cf_crash_signature.
        params_list = []
        for signatures_chunk in Connection.chunks(list(self._signatures), 30):
            # Shallow copy is fine: only f/o/v/j keys are added per chunk.
            params = params_base.copy()
            n = int(utils.get_last_field_num(params))
            params[f"f{n}"] = "OP"
            params[f"j{n}"] = "OR"
            for signature in signatures_chunk:
                n += 1
                params[f"f{n}"] = "cf_crash_signature"
                params[f"o{n}"] = "regexp"
                # Matches the signature as a whole bracketed entry, i.e.
                # "[@ signature]" or "[@signature]".
                params[f"v{n}"] = rf"\[(@ |@){re.escape(signature)}( \]|\])"
            params[f"f{n+1}"] = "CP"
            params_list.append(params)

        signatures_bugs: dict = defaultdict(list)

        def handler(res, data):
            # A bug may list several signatures; attach it to each tracked one.
            for bug in res["bugs"]:
                for signature in utils.get_signatures(bug["cf_crash_signature"]):
                    if signature in self._signatures:
                        data[signature].append(bug)

        logger.debug(
            "Fetch from Bugzilla: requesting bugs for %d signatures",
            len(self._signatures),
        )
        timeout = utils.get_config("common", "bz_query_timeout")
        Bugzilla(
            timeout=timeout,
            queries=[
                connection.Query(Bugzilla.API_URL, params, handler, signatures_bugs)
                for params in params_list
            ],
        ).wait()

        logger.debug(
            "Fetch from Bugzilla: received bugs for %d signatures", len(signatures_bugs)
        )

        return signatures_bugs
×
972

973
    def analyze(self) -> list[SignatureAnalyzer]:
        """Analyze the data related to the signatures.

        Returns:
            One `SignatureAnalyzer` per signature that has no bug filed and
            was analyzed by Crash Clouseau.
        """
        bugs = self.fetch_bugs()
        # TODO(investigate): For now, we are ignoring signatures that have bugs
        # filed even if they are closed long time ago. We should investigate
        # whether we should include the ones with closed bugs. For example, if
        # the bug was closed as Fixed years ago.
        self._signatures.difference_update(bugs.keys())

        clouseau_reports = self.fetch_clouseau_crash_reports()
        # TODO(investigate): For now, we are ignoring signatures that are not
        # analyzed by clouseau. We should investigate why they are not analyzed
        # and whether we should include them.
        self._signatures.intersection_update(clouseau_reports.keys())

        socorro_signatures, num_total_crashes = self.fetch_socorro_info()
        # One shared store so bug data fetched for one analyzer is reused by
        # the others.
        bugs_store = BugsStore()

        return [
            SignatureAnalyzer(
                socorro_signature,
                num_total_crashes,
                clouseau_reports[socorro_signature["term"]],
                bugs_store,
            )
            for socorro_signature in socorro_signatures
        ]
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc