mozilla / relman-auto-nag / build #4787 (push, via coveralls-python)
25 Oct 2023 07:28PM UTC coverage: 22.093% (-0.03%) from 22.121%
Commit by suhaibmujahid: Merge remote-tracking branch 'upstream/master' into crash-kind

716 of 3560 branches covered (20.11%)
10 of 10 new or added lines in 1 file covered (100.0%)
1925 of 8713 relevant lines covered (22.09%)
0.22 hits per line

Source file: /bugbot/crash/analyzer.py — 0.0% of lines covered
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import itertools
import re
from collections import defaultdict
from datetime import date, timedelta
from functools import cached_property
from typing import Iterable, Iterator

from libmozdata import bugzilla, clouseau, connection, socorro
from libmozdata import utils as lmdutils
from libmozdata.bugzilla import Bugzilla
from libmozdata.connection import Connection

from bugbot import logger, utils
from bugbot.bug.analyzer import BugAnalyzer, BugsStore
from bugbot.components import ComponentName
from bugbot.crash import socorro_util

# The max offset from a memory address to be considered "near".
OFFSET_64_BIT = 0x1000
OFFSET_32_BIT = 0x100
# Allocator poison value addresses.
ALLOCATOR_ADDRESSES_64_BIT = (
    (0xE5E5E5E5E5E5E5E5, OFFSET_64_BIT),
    # On 64-bit windows, sometimes it could be doing something with a 32-bit
    # value gotten from freed memory, so it'll be 0X00000000E5E5E5E5 +/-, and
    # because of the address limitation, quite often it will be
    # 0X0000E5E5E5E5E5E5 +/-.
    (0x00000000E5E5E5E5, OFFSET_32_BIT),
    (0x0000E5E5E5E5E5E5, OFFSET_64_BIT),
    (0x4B4B4B4B4B4B4B4B, OFFSET_64_BIT),
)
ALLOCATOR_ADDRESSES_32_BIT = (
    (0xE5E5E5E5, OFFSET_32_BIT),
    (0x4B4B4B4B, OFFSET_32_BIT),
)
# Ranges where addresses are considered near allocator poison values.
ALLOCATOR_RANGES_64_BIT = (
    (addr - offset, addr + offset) for addr, offset in ALLOCATOR_ADDRESSES_64_BIT
)
ALLOCATOR_RANGES_32_BIT = (
    (addr - offset, addr + offset) for addr, offset in ALLOCATOR_ADDRESSES_32_BIT
)


def is_near_null_address(str_address) -> bool:
    """Check if the address is near null.

    Args:
        str_address: The memory address to check.

    Returns:
        True if the address is near null, False otherwise.
    """
    address = int(str_address, 0)
    is_64_bit = len(str_address) >= 18

    if is_64_bit:
        return -OFFSET_64_BIT <= address <= OFFSET_64_BIT

    return -OFFSET_32_BIT <= address <= OFFSET_32_BIT


def is_near_allocator_address(str_address) -> bool:
    """Check if the address is near an allocator poison value.

    Args:
        str_address: The memory address to check.

    Returns:
        True if the address is near an allocator poison value, False otherwise.
    """
    address = int(str_address, 0)
    is_64_bit = len(str_address) >= 18

    return any(
        low <= address <= high
        for low, high in (
            ALLOCATOR_RANGES_64_BIT if is_64_bit else ALLOCATOR_RANGES_32_BIT
        )
    )


# TODO: Move this to libmozdata
def generate_signature_page_url(params: dict, tab: str) -> str:
    """Generate a URL to the signature page on Socorro

    Args:
        params: the parameters for the search query.
        tab: the page tab that should be selected.

    Returns:
        The URL of the signature page on Socorro
    """
    web_url = socorro.Socorro.CRASH_STATS_URL
    query = lmdutils.get_params_for_url(params)
    return f"{web_url}/signature/{query}#{tab}"


class NoCrashReportFoundError(Exception):
    """There are no crash reports that meet the required criteria."""


class ClouseauDataAnalyzer:
    """Analyze the data returned by Crash Clouseau about a specific crash
    signature.
    """

    MINIMUM_CLOUSEAU_SCORE_THRESHOLD: int = 8
    DEFAULT_CRASH_COMPONENT = ComponentName("Core", "General")

    def __init__(self, reports: Iterable[dict], bugs_store: BugsStore):
        self._clouseau_reports = reports
        self.bugs_store = bugs_store

    @cached_property
    def max_clouseau_score(self):
        """The maximum Clouseau score in the crash reports."""
        if not self._clouseau_reports:
            return 0
        return max(report["max_score"] for report in self._clouseau_reports)

    @cached_property
    def regressed_by_potential_bug_ids(self) -> set[int]:
        """The IDs of the bugs whose patches could have caused the crash."""
        minimum_accepted_score = max(
            self.MINIMUM_CLOUSEAU_SCORE_THRESHOLD, self.max_clouseau_score
        )
        return {
            changeset["bug_id"]
            for report in self._clouseau_reports
            if report["max_score"] >= minimum_accepted_score
            for changeset in report["changesets"]
            if changeset["max_score"] >= minimum_accepted_score
            and not changeset["is_merge"]
            and not changeset["is_backedout"]
        }

    @cached_property
    def regressed_by_patch(self) -> str | None:
        """The hash of the patch that could have caused the crash."""
        minimum_accepted_score = max(
            self.MINIMUM_CLOUSEAU_SCORE_THRESHOLD, self.max_clouseau_score
        )
        potential_patches = {
            changeset["changeset"]
            for report in self._clouseau_reports
            if report["max_score"] >= minimum_accepted_score
            for changeset in report["changesets"]
            if changeset["max_score"] >= minimum_accepted_score
            and not changeset["is_merge"]
            and not changeset["is_backedout"]
        }
        if len(potential_patches) == 1:
            return next(iter(potential_patches))
        return None

    @cached_property
    def regressed_by(self) -> int | None:
        """The ID of the bug, one of whose patches could have caused the crash.

        If there are multiple bugs, the value will be `None`.
        """
        bug_ids = self.regressed_by_potential_bug_ids
        if len(bug_ids) == 1:
            return next(iter(bug_ids))
        return None

    @cached_property
    def regressed_by_potential_bugs(self) -> list[BugAnalyzer]:
        """The bugs whose patches could have caused the crash."""
        self.bugs_store.fetch_bugs(
            self.regressed_by_potential_bug_ids,
            [
                "id",
                "groups",
                "assigned_to",
                "product",
                "component",
            ],
        )
        return [
            self.bugs_store.get_bug_by_id(bug_id)
            for bug_id in self.regressed_by_potential_bug_ids
        ]

    @cached_property
    def regressed_by_author(self) -> dict | None:
        """The author of the patch that could have caused the crash.

        If there are multiple regressors, the value will be `None`.

        The regressor bug assignee is considered as the author, even if the
        assignee is not the patch author.
        """

        if not self.regressed_by:
            return None

        bug = self.regressed_by_potential_bugs[0]
        assert bug.id == self.regressed_by
        return bug.get_field("assigned_to_detail")

    @cached_property
    def crash_component(self) -> ComponentName:
        """The component that the crash belongs to.

        If there are multiple components, the value will be the default one.
        """
        potential_components = {
            bug.component for bug in self.regressed_by_potential_bugs
        }
        if len(potential_components) == 1:
            return next(iter(potential_components))
        return self.DEFAULT_CRASH_COMPONENT


class SocorroDataAnalyzer(socorro_util.SignatureStats):
    """Analyze the data returned by Socorro."""

    _bugzilla_os_legal_values = None
    _bugzilla_cpu_legal_values_map = None
    _platforms = [
        {"short_name": "win", "name": "Windows"},
        {"short_name": "mac", "name": "Mac OS X"},
        {"short_name": "lin", "name": "Linux"},
        {"short_name": "and", "name": "Android"},
        {"short_name": "unknown", "name": "Unknown"},
    ]

    def __init__(
        self,
        signature: dict,
        num_total_crashes: int,
    ):
        super().__init__(signature, num_total_crashes, platforms=self._platforms)

    @classmethod
    def to_bugzilla_op_sys(cls, op_sys: str) -> str:
        """Return the corresponding OS name in Bugzilla for the provided OS name
        from Socorro.

        If the OS name is not recognized, return "Other".
        """
        if cls._bugzilla_os_legal_values is None:
            cls._bugzilla_os_legal_values = set(
                bugzilla.BugFields.fetch_field_values("op_sys")
            )

        if op_sys in cls._bugzilla_os_legal_values:
            return op_sys

        if op_sys.startswith("OS X ") or op_sys.startswith("macOS "):
            op_sys = "macOS"
        elif op_sys.startswith("Windows"):
            op_sys = "Windows"
        elif "Linux" in op_sys or op_sys.startswith("Ubuntu"):
            op_sys = "Linux"
        else:
            op_sys = "Other"

        return op_sys

    @property
    def first_crash_date(self) -> str:
        """The date of the first crash within the query time range.

        The date is in YYYY-MM-DD format.
        """
        return self.signature["facets"]["histogram_date"][0]["term"][:10]

    @property
    def bugzilla_op_sys(self) -> str:
        """The name of the OS where the crash happens.

        The value is one of the legal values for Bugzilla's `op_sys` field.

        - If no OS name is found, the value will be "Unspecified".
        - If the OS name is not recognized, the value will be "Other".
        - If multiple OS names are found, the value will be "All", unless the
          OS names can be resolved to a common name without a version. For
          example, "Windows 10" and "Windows 7" will become "Windows".
        """
        all_op_sys = {
            self.to_bugzilla_op_sys(op_sys["term"])
            for op_sys in self.signature["facets"]["platform_pretty_version"]
        }

        if len(all_op_sys) > 1:
            # Resolve to root OS name by removing the version number.
            all_op_sys = {op_sys.split(" ")[0] for op_sys in all_op_sys}

        if len(all_op_sys) == 2 and "Other" in all_op_sys:
            # TODO: explain this workaround.
            all_op_sys.remove("Other")

        if len(all_op_sys) == 1:
            return next(iter(all_op_sys))

        if len(all_op_sys) == 0:
            return "Unspecified"

        return "All"

    @classmethod
    def to_bugzilla_cpu(cls, cpu: str) -> str:
        """Return the corresponding CPU name in Bugzilla for the provided name
        from Socorro.

        If the CPU is not recognized, return "Other".
        """
        if cls._bugzilla_cpu_legal_values_map is None:
            cls._bugzilla_cpu_legal_values_map = {
                value.lower(): value
                for value in bugzilla.BugFields.fetch_field_values("rep_platform")
            }

        return cls._bugzilla_cpu_legal_values_map.get(cpu, "Other")

    @property
    def bugzilla_cpu_arch(self) -> str:
        """The CPU architecture of the devices where the crash happens.

        The value is one of the legal values for Bugzilla's `rep_platform` field.

        - If no CPU architecture is found, the value will be "Unspecified".
        - If the CPU architecture is not recognized, the value will be "Other".
        - If multiple CPU architectures are found, the value will be "All".
        """
        all_cpu_arch = {
            self.to_bugzilla_cpu(cpu["term"])
            for cpu in self.signature["facets"]["cpu_arch"]
        }

        if len(all_cpu_arch) == 2 and "Other" in all_cpu_arch:
            all_cpu_arch.remove("Other")

        if len(all_cpu_arch) == 1:
            return next(iter(all_cpu_arch))

        if len(all_cpu_arch) == 0:
            return "Unspecified"

        return "All"

    @property
    def user_comments_page_url(self) -> str:
        """The URL to the Signature page on Socorro where the Comments tab is
        selected.
        """
        start_date = date.today() - timedelta(weeks=26)
        params = {
            "signature": self.signature_term,
            "date": socorro.SuperSearch.get_search_date(start_date),
        }
        return generate_signature_page_url(params, "comments")

    @property
    def num_user_comments(self) -> int:
        """The number of crash reports with user comments."""
        # TODO: count useful/interesting user comments (e.g., exclude one word comments)
        return self.signature["facets"]["cardinality_user_comments"]["value"]

    @property
    def has_user_comments(self) -> bool:
        """Whether the crash signature has any reports with a user comment."""
        return self.num_user_comments > 0

    @property
    def top_proto_signature(self) -> str:
        """The proto signature that occurs the most."""
        return self.signature["facets"]["proto_signature"][0]["term"]

    @property
    def num_top_proto_signature_crashes(self) -> int:
        """The number of crashes for the most occurring proto signature."""
        return self.signature["facets"]["proto_signature"][0]["count"]

    def _build_ids(self) -> Iterator[int]:
        """Yields the build IDs where the crash occurred."""
        for build_id in self.signature["facets"]["build_id"]:
            yield build_id["term"]

    @property
    def top_build_id(self) -> int:
        """The build ID where most crashes occurred."""
        return self.signature["facets"]["build_id"][0]["term"]

    @cached_property
    def num_near_null_crashes(self) -> int:
        """The number of crashes that occurred on addresses near null."""
        return sum(
            address["count"]
            for address in self.signature["facets"]["address"]
            if is_near_null_address(address["term"])
        )

    @property
    def is_near_null_crash(self) -> bool:
        """Whether all crashes occurred on addresses near null."""
        return self.num_near_null_crashes == self.num_crashes

    @property
    def is_potential_near_null_crash(self) -> bool:
        """Whether the signature is a potential near null crash.

        The value will be True if some but not all crashes occurred on addresses
        near null.
        """
        return not self.is_near_null_crash and self.num_near_null_crashes > 0

    @property
    def is_near_null_related_crash(self) -> bool:
        """Whether the signature is related to near null crashes.

        The value will be True if any of the crashes occurred on addresses near
        null.
        """
        return self.is_near_null_crash or self.is_potential_near_null_crash

    @cached_property
    def num_near_allocator_crashes(self) -> int:
        """The number of crashes that occurred on addresses near an allocator
        poison value.
        """
        return sum(
            address["count"]
            for address in self.signature["facets"]["address"]
            if is_near_allocator_address(address["term"])
        )

    @property
    def is_near_allocator_crash(self) -> bool:
        """Whether all crashes occurred on addresses near an allocator poison
        value.
        """
        return self.num_near_allocator_crashes == self.num_crashes

    @property
    def is_potential_near_allocator_crash(self) -> bool:
        """Whether the signature is a potential near allocator poison value
        crash.

        The value will be True if some but not all crashes occurred on addresses
        near an allocator poison value.
        """
        return not self.is_near_allocator_crash and self.num_near_allocator_crashes > 0

    @property
    def is_near_allocator_related_crash(self) -> bool:
        """Whether the signature is related to near allocator poison value
        crashes.

        The value will be True if any of the crashes occurred on addresses near
        an allocator poison value.
        """
        return self.is_near_allocator_crash or self.is_potential_near_allocator_crash


class SignatureAnalyzer(SocorroDataAnalyzer, ClouseauDataAnalyzer):
    """Analyze the data related to a signature.

    This includes data from Socorro and Clouseau.
    """

    def __init__(
        self,
        socorro_signature: dict,
        num_total_crashes: int,
        clouseau_reports: list[dict],
        bugs_store: BugsStore,
    ):
        SocorroDataAnalyzer.__init__(self, socorro_signature, num_total_crashes)
        ClouseauDataAnalyzer.__init__(self, clouseau_reports, bugs_store)

    def _fetch_crash_reports(
        self,
        proto_signature: str,
        build_id: int | Iterable[int],
        limit: int = 1,
    ) -> Iterator[dict]:
        params = {
            "proto_signature": "=" + proto_signature,
            "build_id": build_id,
            "_columns": [
                "uuid",
            ],
            "_results_number": limit,
        }

        def handler(res: dict, data: dict):
            data.update(res)

        data: dict = {}
        socorro.SuperSearch(params=params, handler=handler, handlerdata=data).wait()

        yield from data["hits"]

    def fetch_representative_processed_crash(self) -> dict:
        """Fetch a processed crash to represent the signature.

        This could fetch multiple processed crashes and return the one that is
        most likely to be useful.
        """
        limit_to_top_proto_signature = (
            self.num_top_proto_signature_crashes / self.num_crashes > 0.6
        )

        reports = itertools.chain(
            # Reports with a higher score from clouseau are more likely to be
            # useful.
            sorted(
                self._clouseau_reports,
                key=lambda report: report["max_score"],
                reverse=True,
            ),
            # Next we try to find reports from the top crashing build because
            # they are likely to be representative.
            self._fetch_crash_reports(self.top_proto_signature, self.top_build_id),
            self._fetch_crash_reports(self.top_proto_signature, self._build_ids()),
        )
        for report in reports:
            uuid = report["uuid"]
            processed_crash = socorro.ProcessedCrash.get_processed(uuid)[uuid]
            if (
                not limit_to_top_proto_signature
                or processed_crash["proto_signature"] == self.top_proto_signature
            ):
                # TODO(investigate): maybe we should check if the stack is
                # corrupted (ask gsvelto or willkg about how to detect that)
                return processed_crash

        raise NoCrashReportFoundError(
            f"No crash report found with the most frequent proto signature for {self.signature_term}."
        )

    @cached_property
    def is_potential_security_crash(self) -> bool:
        """Whether the crash is related to a potential security bug.

        The value will be True if:
            - the signature is related to near allocator poison value crashes, or
            - one of the potential regressors is a security bug
        """
        return self.is_near_allocator_related_crash or any(
            bug.is_security for bug in self.regressed_by_potential_bugs
        )

    @property
    def process_type_summary(self) -> str:
        """The summary of the process types for the crash signature."""
        process_types = self.signature["facets"]["process_type"]
        if len(process_types) == 0:
            return "Unknown"

        if len(process_types) == 1:
            process_type = process_types[0]["term"]
            if len(process_type) <= 3:
                return process_type.upper()
            return process_type.capitalize()

        return "Multiple distinct types"


class SignaturesDataFetcher:
    """Fetch the data related to the given signatures."""

    MEMORY_ACCESS_ERROR_REASONS = (
        # On Windows:
        "EXCEPTION_ACCESS_VIOLATION_READ",
        "EXCEPTION_ACCESS_VIOLATION_WRITE",
        "EXCEPTION_ACCESS_VIOLATION_EXEC",
        # On Linux:
        "SIGSEGV / SEGV_MAPERR",
        "SIGSEGV / SEGV_ACCERR",
    )

    EXCLUDED_MOZ_REASON_STRINGS = (
        "MOZ_CRASH(OOM)",
        "MOZ_CRASH(Out of memory)",
        "out of memory",
        "Shutdown hanging",
        # TODO(investigate): do we need to exclude signatures whose reason
        # contains `[unhandlable oom]`?
        # Example: arena_t::InitChunk | arena_t::AllocRun | arena_t::MallocLarge | arena_t::Malloc | BaseAllocator::malloc | Allocator::malloc | PageMalloc
        # "[unhandlable oom]",
    )

    # If the crash reason starts with any of the following prefixes, then it
    # is a network or I/O error.
    EXCLUDED_IO_ERROR_REASON_PREFIXES = (
        "EXCEPTION_IN_PAGE_ERROR_READ",
        "EXCEPTION_IN_PAGE_ERROR_WRITE",
        "EXCEPTION_IN_PAGE_ERROR_EXEC",
    )

    # TODO(investigate): do we need to exclude all these signature prefixes?
    EXCLUDED_SIGNATURE_PREFIXES = (
        "OOM | ",
        "bad hardware | ",
        "shutdownhang | ",
    )

    SUMMARY_DURATION = timedelta(weeks=10)

    def __init__(
        self,
        signatures: Iterable[str],
        product: str = "Firefox",
        channel: str = "nightly",
    ):
        self._signatures = set(signatures)
        self._product = product
        self._channel = channel

    @classmethod
    def find_new_actionable_crashes(
        cls,
        product: str,
        channel: str,
        days_to_check: int = 7,
        days_without_crashes: int = 7,
    ) -> "SignaturesDataFetcher":
        """Find new actionable crashes.

        Args:
            product: The product to check.
            channel: The release channel to check.
            days_to_check: The number of days to check for crashes.
            days_without_crashes: The number of days without crashes that must
                precede `days_to_check` for the signature to be considered new.

        Returns:
            A `SignaturesDataFetcher` for the new actionable signatures.
        """
        duration = days_to_check + days_without_crashes
        end_date = lmdutils.get_date_ymd("today")
        start_date = end_date - timedelta(duration)
        earliest_allowed_date = lmdutils.get_date_str(
            end_date - timedelta(days_to_check)
        )
        date_range = socorro.SuperSearch.get_search_date(start_date, end_date)

        params = {
            "product": product,
            "release_channel": channel,
            "date": date_range,
            # TODO(investigate): should we do a local filter instead of the
            # following (should we exclude the signature if one of the crashes
            # is a shutdown hang?):
            # If the `ipc_shutdown_state` or `shutdown_progress` field are
            # non-empty then it's a shutdown hang.
            "ipc_shutdown_state": "__null__",
            "shutdown_progress": "__null__",
            # TODO(investigate): should we use the following instead of the
            # local filter.
            # "oom_allocation_size": "!__null__",
            "_aggs.signature": [
                "moz_crash_reason",
                "reason",
                "_histogram.date",
                "_cardinality.install_time",
                "_cardinality.oom_allocation_size",
            ],
            "_results_number": 0,
            "_facets_size": 10000,
        }

        def handler(search_resp: dict, data: list):
            logger.debug(
                "Total of %d signatures received from Socorro",
                len(search_resp["facets"]["signature"]),
            )

            for crash in search_resp["facets"]["signature"]:
                signature = crash["term"]
                if any(
                    signature.startswith(excluded_prefix)
                    for excluded_prefix in cls.EXCLUDED_SIGNATURE_PREFIXES
                ):
                    # Ignore signatures that start with any of the excluded prefixes.
                    continue

                facets = crash["facets"]
                installations = facets["cardinality_install_time"]["value"]
                if installations <= 1:
                    # Ignore crashes that only happen on one installation.
                    continue

                first_date = facets["histogram_date"][0]["term"]
                if first_date < earliest_allowed_date:
                    # The crash is not new, skip it.
                    continue

                if any(
                    reason["term"].startswith(io_error_prefix)
                    for reason in facets["reason"]
                    for io_error_prefix in cls.EXCLUDED_IO_ERROR_REASON_PREFIXES
                ):
                    # Ignore Network or I/O error crashes.
                    continue

                if crash["count"] < 20:
                    # For signatures with low volume, having multiple types of
                    # memory errors indicates potential bad hardware crashes.
                    num_memory_error_types = sum(
                        reason["term"] in cls.MEMORY_ACCESS_ERROR_REASONS
                        for reason in facets["reason"]
                    )
                    if num_memory_error_types > 1:
                        # Potential bad hardware crash, skip it.
                        continue

                # TODO: Add a filter using the `possible_bit_flips_max_confidence`
                # field to exclude bad hardware crashes. The field is not available yet.
                # See: https://bugzilla.mozilla.org/show_bug.cgi?id=1816669#c3

                # TODO(investigate): is this needed since we are already
                # filtering signatures that start with "OOM | "
                if facets["cardinality_oom_allocation_size"]["value"]:
                    # If one of the crashes is an OOM crash, skip it.
                    continue

                # TODO(investigate): do we need to check for the `moz_crash_reason`
                moz_crash_reasons = facets["moz_crash_reason"]
                if moz_crash_reasons and any(
                    excluded_reason in reason["term"]
                    for reason in moz_crash_reasons
                    for excluded_reason in cls.EXCLUDED_MOZ_REASON_STRINGS
                ):
                    continue

                data.append(signature)

        signatures: list = []
        socorro.SuperSearch(
            params=params,
            handler=handler,
            handlerdata=signatures,
        ).wait()

        logger.debug(
            "Total of %d signatures left after applying the filtering criteria",
            len(signatures),
        )

        return cls(signatures, product, channel)

    def fetch_clouseau_crash_reports(self) -> dict[str, list]:
        """Fetch the crash reports data from Crash Clouseau."""
        if not self._signatures:
            return {}

        logger.debug(
            "Fetch from Clouseau: requesting reports for %d signatures",
            len(self._signatures),
        )

        signature_reports = clouseau.Reports.get_by_signatures(
            self._signatures,
            product=self._product,
            channel=self._channel,
        )

        logger.debug(
            "Fetch from Clouseau: received reports for %d signatures",
            len(signature_reports),
        )

        return signature_reports

    def fetch_socorro_info(self) -> tuple[list[dict], int]:
        """Fetch the signature data from Socorro."""
        if not self._signatures:
            return [], 0

        end_date = lmdutils.get_date_ymd("today")
        start_date = end_date - self.SUMMARY_DURATION
        date_range = socorro.SuperSearch.get_search_date(start_date, end_date)

        params = {
            "product": self._product,
            # TODO(investigate): should we include all release channels?
            "release_channel": self._channel,
            # TODO(investigate): should we limit based on the build date as well?
            "date": date_range,
            # TODO: split signatures into chunks to avoid very long query URLs
            "signature": ["=" + signature for signature in self._signatures],
            "_aggs.signature": [
                "address",
                "build_id",
                "cpu_arch",
                "proto_signature",
                "_cardinality.user_comments",
                "cpu_arch",
                "platform_pretty_version",
                "_histogram.date",
                # The following are needed for SignatureStats:
                "platform",
                "is_garbage_collecting",
                "_cardinality.install_time",
                "startup_crash",
                "_histogram.uptime",
                "process_type",
            ],
            "_results_number": 0,
            "_facets_size": 10000,
        }

        def handler(search_results: dict, data: dict):
            data["num_total_crashes"] = search_results["total"]
            data["signatures"] = search_results["facets"]["signature"]

        logger.debug(
            "Fetch from Socorro: requesting info for %d signatures",
            len(self._signatures),
        )

        data: dict = {}
        socorro.SuperSearchUnredacted(
            params=params,
            handler=handler,
            handlerdata=data,
        ).wait()

        logger.debug(
            "Fetch from Socorro: received info for %d signatures",
            len(data["signatures"]),
        )

        return data["signatures"], data["num_total_crashes"]

    def fetch_bugs(
        self, include_fields: list[str] | None = None
    ) -> dict[str, list[dict]]:
        """Fetch bugs that are filed against the given signatures."""
        if not self._signatures:
            return {}

        params_base: dict = {
            "include_fields": [
                "cf_crash_signature",
            ],
        }

        if include_fields:
            params_base["include_fields"].extend(include_fields)

        params_list = []
        for signatures_chunk in Connection.chunks(list(self._signatures), 30):
            params = params_base.copy()
            n = int(utils.get_last_field_num(params))
            params[f"f{n}"] = "OP"
            params[f"j{n}"] = "OR"
            for signature in signatures_chunk:
                n += 1
                params[f"f{n}"] = "cf_crash_signature"
                params[f"o{n}"] = "regexp"
                params[f"v{n}"] = rf"\[(@ |@){re.escape(signature)}( \]|\])"
            params[f"f{n+1}"] = "CP"
            params_list.append(params)

        signatures_bugs: dict = defaultdict(list)

        def handler(res, data):
            for bug in res["bugs"]:
                for signature in utils.get_signatures(bug["cf_crash_signature"]):
                    if signature in self._signatures:
                        data[signature].append(bug)

        logger.debug(
            "Fetch from Bugzilla: requesting bugs for %d signatures",
            len(self._signatures),
        )
        timeout = utils.get_config("common", "bz_query_timeout")
        Bugzilla(
            timeout=timeout,
            queries=[
                connection.Query(Bugzilla.API_URL, params, handler, signatures_bugs)
                for params in params_list
            ],
        ).wait()

        logger.debug(
            "Fetch from Bugzilla: received bugs for %d signatures", len(signatures_bugs)
        )

        return signatures_bugs

    def analyze(self) -> list[SignatureAnalyzer]:
        """Analyze the data related to the signatures."""
        bugs = self.fetch_bugs()
        # TODO(investigate): For now, we are ignoring signatures that have bugs
        # filed even if they were closed a long time ago. We should investigate
        # whether we should include the ones with closed bugs. For example, if
        # the bug was closed as Fixed years ago.
        self._signatures.difference_update(bugs.keys())

        clouseau_reports = self.fetch_clouseau_crash_reports()
        # TODO(investigate): For now, we are ignoring signatures that are not
        # analyzed by clouseau. We should investigate why they are not analyzed
        # and whether we should include them.
        self._signatures.intersection_update(clouseau_reports.keys())

        signatures, num_total_crashes = self.fetch_socorro_info()
        bugs_store = BugsStore()

        return [
            SignatureAnalyzer(
                signature,
                num_total_crashes,
                clouseau_reports[signature["term"]],
                bugs_store,
            )
            for signature in signatures
        ]
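
Usage sketch (illustrative, not part of analyzer.py): the pieces above are typically wired together by first collecting new actionable signatures and then analyzing each one. The snippet below assumes the module is importable as bugbot.crash.analyzer and picks a few analyzer properties arbitrarily; it is a sketch of the intended flow under those assumptions, not the bot's actual rule code.

from bugbot.crash.analyzer import SignaturesDataFetcher

# Find new actionable crash signatures on Firefox nightly (defaults: 7 days to
# check, preceded by 7 crash-free days), then build one analyzer per signature.
fetcher = SignaturesDataFetcher.find_new_actionable_crashes("Firefox", "nightly")
for analysis in fetcher.analyze():
    # Each item is a SignatureAnalyzer combining Socorro and Clouseau data.
    print(
        analysis.signature_term,
        analysis.crash_component,
        analysis.regressed_by,  # potential regressor bug ID, or None
        analysis.is_potential_security_crash,
        analysis.process_type_summary,
    )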