mozilla / relman-auto-nag / #4794

30 Oct 2023 01:38PM UTC coverage: 22.121%. Remained the same
Build #4794 · push · coveralls-python · web-flow
Bump filelock from 3.12.4 to 3.13.0

Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.12.4 to 3.13.0.
- [Release notes](https://github.com/tox-dev/py-filelock/releases)
- [Changelog](https://github.com/tox-dev/filelock/blob/main/docs/changelog.rst)
- [Commits](https://github.com/tox-dev/py-filelock/compare/3.12.4...3.13.0)

---
updated-dependencies:
- dependency-name: filelock
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

716 of 3554 branches covered (20.15%)

1925 of 8702 relevant lines covered (22.12%)

0.22 hits per line

Source File

/bugbot/crash/analyzer.py (0.0% covered)
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import itertools
import re
from collections import defaultdict
from datetime import date, timedelta
from functools import cached_property
from typing import Iterable, Iterator

from libmozdata import bugzilla, clouseau, connection, socorro
from libmozdata import utils as lmdutils
from libmozdata.bugzilla import Bugzilla
from libmozdata.connection import Connection

from bugbot import logger, utils
from bugbot.bug.analyzer import BugAnalyzer, BugsStore
from bugbot.components import ComponentName
from bugbot.crash import socorro_util

# The max offset from a memory address to be considered "near".
OFFSET_64_BIT = 0x1000
OFFSET_32_BIT = 0x100
# Allocator poison value addresses.
ALLOCATOR_ADDRESSES_64_BIT = (
    (0xE5E5E5E5E5E5E5E5, OFFSET_64_BIT),
    # On 64-bit Windows, sometimes it could be doing something with a 32-bit
    # value gotten from freed memory, so it'll be 0X00000000E5E5E5E5 +/-, and
    # because of the address limitation, quite often it will be
    # 0X0000E5E5E5E5E5E5 +/-.
    (0x00000000E5E5E5E5, OFFSET_32_BIT),
    (0x0000E5E5E5E5E5E5, OFFSET_64_BIT),
    (0x4B4B4B4B4B4B4B4B, OFFSET_64_BIT),
)
ALLOCATOR_ADDRESSES_32_BIT = (
    (0xE5E5E5E5, OFFSET_32_BIT),
    (0x4B4B4B4B, OFFSET_32_BIT),
)
# Ranges where addresses are considered near allocator poison values.
# Materialized as tuples so they can be iterated more than once (a bare
# generator expression would be exhausted after the first use).
ALLOCATOR_RANGES_64_BIT = tuple(
    (addr - offset, addr + offset) for addr, offset in ALLOCATOR_ADDRESSES_64_BIT
)
ALLOCATOR_RANGES_32_BIT = tuple(
    (addr - offset, addr + offset) for addr, offset in ALLOCATOR_ADDRESSES_32_BIT
)


def is_near_null_address(str_address) -> bool:
    """Check if the address is near null.

    Args:
        str_address: The memory address to check.

    Returns:
        True if the address is near null, False otherwise.
    """
    address = int(str_address, 0)
    is_64_bit = len(str_address) >= 18

    if is_64_bit:
        return -OFFSET_64_BIT <= address <= OFFSET_64_BIT

    return -OFFSET_32_BIT <= address <= OFFSET_32_BIT


def is_near_allocator_address(str_address) -> bool:
    """Check if the address is near an allocator poison value.

    Args:
        str_address: The memory address to check.

    Returns:
        True if the address is near an allocator poison value, False otherwise.
    """
    address = int(str_address, 0)
    is_64_bit = len(str_address) >= 18

    return any(
        low <= address <= high
        for low, high in (
            ALLOCATOR_RANGES_64_BIT if is_64_bit else ALLOCATOR_RANGES_32_BIT
        )
    )


# TODO: Move this to libmozdata
def generate_signature_page_url(params: dict, tab: str) -> str:
    """Generate a URL to the signature page on Socorro.

    Args:
        params: the parameters for the search query.
        tab: the page tab that should be selected.

    Returns:
        The URL of the signature page on Socorro.
    """
    web_url = socorro.Socorro.CRASH_STATS_URL
    query = lmdutils.get_params_for_url(params)
    return f"{web_url}/signature/{query}#{tab}"


class NoCrashReportFoundError(Exception):
    """There are no crash reports that meet the required criteria."""


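# Illustrative examples (not part of the original module) of how the address
# helpers above classify crash addresses, using the OFFSET_* constants defined
# at the top of this file:
#
#     is_near_null_address("0x0000000000000008")   # True: 64-bit, within 0x1000 of null
#     is_near_null_address("0x7ffe12345678")       # False: outside the near-null range
#     is_near_allocator_address("0xe5e5e5f0")      # True: near the 32-bit 0xE5E5E5E5 poison value
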
class ClouseauDataAnalyzer:
    """Analyze the data returned by Crash Clouseau about a specific crash
    signature.
    """

    MINIMUM_CLOUSEAU_SCORE_THRESHOLD: int = 8
    DEFAULT_CRASH_COMPONENT = ComponentName("Core", "General")

    def __init__(self, reports: Iterable[dict], bugs_store: BugsStore):
        self._clouseau_reports = reports
        self.bugs_store = bugs_store

    @cached_property
    def max_clouseau_score(self):
        """The maximum Clouseau score in the crash reports."""
        if not self._clouseau_reports:
            return 0
        return max(report["max_score"] for report in self._clouseau_reports)

    @cached_property
    def regressed_by_potential_bug_ids(self) -> set[int]:
        """The IDs of the bugs whose patches could have caused the crash."""
        minimum_accepted_score = max(
            self.MINIMUM_CLOUSEAU_SCORE_THRESHOLD, self.max_clouseau_score
        )
        return {
            changeset["bug_id"]
            for report in self._clouseau_reports
            if report["max_score"] >= minimum_accepted_score
            for changeset in report["changesets"]
            if changeset["max_score"] >= minimum_accepted_score
            and not changeset["is_merge"]
            and not changeset["is_backedout"]
        }

    @cached_property
    def regressed_by_patch(self) -> str | None:
        """The hash of the patch that could have caused the crash."""
        minimum_accepted_score = max(
            self.MINIMUM_CLOUSEAU_SCORE_THRESHOLD, self.max_clouseau_score
        )
        potential_patches = {
            changeset["changeset"]
            for report in self._clouseau_reports
            if report["max_score"] >= minimum_accepted_score
            for changeset in report["changesets"]
            if changeset["max_score"] >= minimum_accepted_score
            and not changeset["is_merge"]
            and not changeset["is_backedout"]
        }
        if len(potential_patches) == 1:
            return next(iter(potential_patches))
        return None

    @cached_property
    def regressed_by(self) -> int | None:
        """The ID of the bug whose patches could have caused the crash.

        If there are multiple candidate bugs, the value will be `None`.
        """
        bug_ids = self.regressed_by_potential_bug_ids
        if len(bug_ids) == 1:
            return next(iter(bug_ids))
        return None

    @cached_property
    def regressed_by_potential_bugs(self) -> list[BugAnalyzer]:
        """The bugs whose patches could have caused the crash."""
        self.bugs_store.fetch_bugs(
            self.regressed_by_potential_bug_ids,
            [
                "id",
                "groups",
                "assigned_to",
                "product",
                "component",
            ],
        )
        return [
            self.bugs_store.get_bug_by_id(bug_id)
            for bug_id in self.regressed_by_potential_bug_ids
        ]

    @cached_property
    def regressed_by_author(self) -> dict | None:
        """The author of the patch that could have caused the crash.

        If there are multiple regressors, the value will be `None`.

        The regressor bug assignee is considered the author, even if the
        assignee is not the patch author.
        """

        if not self.regressed_by:
            return None

        bug = self.regressed_by_potential_bugs[0]
        assert bug.id == self.regressed_by
        return bug.get_field("assigned_to_detail")

    @cached_property
    def crash_component(self) -> ComponentName:
        """The component that the crash belongs to.

        If there are multiple components, the value will be the default one.
        """
        potential_components = {
            bug.component for bug in self.regressed_by_potential_bugs
        }
        if len(potential_components) == 1:
            return next(iter(potential_components))
        return self.DEFAULT_CRASH_COMPONENT

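# A minimal sketch (hypothetical data, not from the original file) of how the
# scoring threshold above behaves: a changeset only counts as a potential
# regressor when both its report and the changeset itself reach the accepted
# score, and merge or backed-out changesets are ignored.
#
#     report = {
#         "max_score": 9,
#         "changesets": [
#             {"bug_id": 111, "max_score": 9, "is_merge": False, "is_backedout": False},
#             {"bug_id": 222, "max_score": 3, "is_merge": False, "is_backedout": False},
#         ],
#     }
#     analyzer = ClouseauDataAnalyzer([report], BugsStore())
#     analyzer.regressed_by_potential_bug_ids  # {111}; bug 222 is below the threshold
#     analyzer.regressed_by                    # 111, since there is a single candidate
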
class SocorroDataAnalyzer(socorro_util.SignatureStats):
    """Analyze the data returned by Socorro."""

    _bugzilla_os_legal_values = None
    _bugzilla_cpu_legal_values_map = None
    _platforms = [
        {"short_name": "win", "name": "Windows"},
        {"short_name": "mac", "name": "Mac OS X"},
        {"short_name": "lin", "name": "Linux"},
        {"short_name": "and", "name": "Android"},
        {"short_name": "unknown", "name": "Unknown"},
    ]

    def __init__(
        self,
        signature: dict,
        num_total_crashes: int,
    ):
        super().__init__(signature, num_total_crashes, platforms=self._platforms)

    @classmethod
    def to_bugzilla_op_sys(cls, op_sys: str) -> str:
        """Return the corresponding OS name in Bugzilla for the provided OS name
        from Socorro.

        If the OS name is not recognized, return "Other".
        """
        if cls._bugzilla_os_legal_values is None:
            cls._bugzilla_os_legal_values = set(
                bugzilla.BugFields.fetch_field_values("op_sys")
            )

        if op_sys in cls._bugzilla_os_legal_values:
            return op_sys

        if op_sys.startswith("OS X ") or op_sys.startswith("macOS "):
            op_sys = "macOS"
        elif op_sys.startswith("Windows"):
            op_sys = "Windows"
        elif "Linux" in op_sys or op_sys.startswith("Ubuntu"):
            op_sys = "Linux"
        else:
            op_sys = "Other"

        return op_sys

    @property
    def first_crash_date(self) -> str:
        """The date of the first crash within the query time range.

        The date is in YYYY-MM-DD format.
        """
        return self.signature["facets"]["histogram_date"][0]["term"][:10]

    @property
    def bugzilla_op_sys(self) -> str:
        """The name of the OS where the crash happens.

        The value is one of the legal values for Bugzilla's `op_sys` field.

        - If no OS name is found, the value will be "Unspecified".
        - If the OS name is not recognized, the value will be "Other".
        - If multiple OS names are found, the value will be "All", unless the OS
          names can be resolved to a common name without a version. For example,
          "Windows 10" and "Windows 7" will become "Windows".
        """
        all_op_sys = {
            self.to_bugzilla_op_sys(op_sys["term"])
            for op_sys in self.signature["facets"]["platform_pretty_version"]
        }

        if len(all_op_sys) > 1:
            # Resolve to root OS name by removing the version number.
            all_op_sys = {op_sys.split(" ")[0] for op_sys in all_op_sys}

        if len(all_op_sys) == 2 and "Other" in all_op_sys:
            # TODO: explain this workaround.
            all_op_sys.remove("Other")

        if len(all_op_sys) == 1:
            return next(iter(all_op_sys))

        if len(all_op_sys) == 0:
            return "Unspecified"

        return "All"

    @classmethod
    def to_bugzilla_cpu(cls, cpu: str) -> str:
        """Return the corresponding CPU name in Bugzilla for the provided name
        from Socorro.

        If the CPU is not recognized, return "Other".
        """
        if cls._bugzilla_cpu_legal_values_map is None:
            cls._bugzilla_cpu_legal_values_map = {
                value.lower(): value
                for value in bugzilla.BugFields.fetch_field_values("rep_platform")
            }

        return cls._bugzilla_cpu_legal_values_map.get(cpu, "Other")

    @property
    def bugzilla_cpu_arch(self) -> str:
        """The CPU architecture of the devices where the crash happens.

        The value is one of the legal values for Bugzilla's `rep_platform` field.

        - If no CPU architecture is found, the value will be "Unspecified".
        - If the CPU architecture is not recognized, the value will be "Other".
        - If multiple CPU architectures are found, the value will be "All".
        """
        all_cpu_arch = {
            self.to_bugzilla_cpu(cpu["term"])
            for cpu in self.signature["facets"]["cpu_arch"]
        }

        if len(all_cpu_arch) == 2 and "Other" in all_cpu_arch:
            all_cpu_arch.remove("Other")

        if len(all_cpu_arch) == 1:
            return next(iter(all_cpu_arch))

        if len(all_cpu_arch) == 0:
            return "Unspecified"

        return "All"

    @property
    def user_comments_page_url(self) -> str:
        """The URL to the Signature page on Socorro where the Comments tab is
        selected.
        """
        start_date = date.today() - timedelta(weeks=26)
        params = {
            "signature": self.signature_term,
            "date": socorro.SuperSearch.get_search_date(start_date),
        }
        return generate_signature_page_url(params, "comments")

    @property
    def num_user_comments(self) -> int:
        """The number of crash reports with user comments."""
        # TODO: count useful/interesting user comments (e.g., exclude one-word comments)
        return self.signature["facets"]["cardinality_user_comments"]["value"]

    @property
    def has_user_comments(self) -> bool:
        """Whether the crash signature has any reports with a user comment."""
        return self.num_user_comments > 0

    @property
    def top_proto_signature(self) -> str:
        """The proto signature that occurs the most."""
        return self.signature["facets"]["proto_signature"][0]["term"]

    @property
    def num_top_proto_signature_crashes(self) -> int:
        """The number of crashes for the most occurring proto signature."""
        return self.signature["facets"]["proto_signature"][0]["count"]

    def _build_ids(self) -> Iterator[int]:
        """Yields the build IDs where the crash occurred."""
        for build_id in self.signature["facets"]["build_id"]:
            yield build_id["term"]

    @property
    def top_build_id(self) -> int:
        """The build ID where most crashes occurred."""
        return self.signature["facets"]["build_id"][0]["term"]

    @cached_property
    def num_near_null_crashes(self) -> int:
        """The number of crashes that occurred on addresses near null."""
        return sum(
            address["count"]
            for address in self.signature["facets"]["address"]
            if is_near_null_address(address["term"])
        )

    @property
    def is_near_null_crash(self) -> bool:
        """Whether all crashes occurred on addresses near null."""
        return self.num_near_null_crashes == self.num_crashes

    @property
    def is_potential_near_null_crash(self) -> bool:
        """Whether the signature is a potential near-null crash.

        The value will be True if some but not all crashes occurred on addresses
        near null.
        """
        return not self.is_near_null_crash and self.num_near_null_crashes > 0

    @property
    def is_near_null_related_crash(self) -> bool:
        """Whether the signature is related to near-null crashes.

        The value will be True if any of the crashes occurred on addresses near
        null.
        """
        return self.is_near_null_crash or self.is_potential_near_null_crash

    @cached_property
    def num_near_allocator_crashes(self) -> int:
        """The number of crashes that occurred on addresses near an allocator
        poison value.
        """
        return sum(
            address["count"]
            for address in self.signature["facets"]["address"]
            if is_near_allocator_address(address["term"])
        )

    @property
    def is_near_allocator_crash(self) -> bool:
        """Whether all crashes occurred on addresses near an allocator poison
        value.
        """
        return self.num_near_allocator_crashes == self.num_crashes

    @property
    def is_potential_near_allocator_crash(self) -> bool:
        """Whether the signature is a potential near allocator poison value
        crash.

        The value will be True if some but not all crashes occurred on addresses
        near an allocator poison value.
        """
        return not self.is_near_allocator_crash and self.num_near_allocator_crashes > 0

    @property
    def is_near_allocator_related_crash(self) -> bool:
        """Whether the signature is related to near allocator poison value
        crashes.

        The value will be True if any of the crashes occurred on addresses near
        an allocator poison value.
        """
        return self.is_near_allocator_crash or self.is_potential_near_allocator_crash

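# Illustrative note (not part of the original module): the class methods above
# normalize Socorro platform strings into Bugzilla's legal field values, which
# are fetched live from Bugzilla on first use, so exact results depend on that
# list. For example, assuming the inputs below are not themselves legal values:
#
#     SocorroDataAnalyzer.to_bugzilla_op_sys("Ubuntu 22.04")  # "Linux" via the prefix checks
#     SocorroDataAnalyzer.to_bugzilla_cpu("amd64")            # "Other" (no matching rep_platform value)
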
class SignatureAnalyzer(SocorroDataAnalyzer, ClouseauDataAnalyzer):
    """Analyze the data related to a signature.

    This includes data from Socorro and Clouseau.
    """

    def __init__(
        self,
        socorro_signature: dict,
        num_total_crashes: int,
        clouseau_reports: list[dict],
        bugs_store: BugsStore,
    ):
        SocorroDataAnalyzer.__init__(self, socorro_signature, num_total_crashes)
        ClouseauDataAnalyzer.__init__(self, clouseau_reports, bugs_store)

    def _fetch_crash_reports(
        self,
        proto_signature: str,
        build_id: int | Iterable[int],
        limit: int = 1,
    ) -> Iterator[dict]:
        """Fetch crash report hits from Socorro for a proto signature and build ID(s)."""
        params = {
            "proto_signature": "=" + proto_signature,
            "build_id": build_id,
            "_columns": [
                "uuid",
            ],
            "_results_number": limit,
        }

        def handler(res: dict, data: dict):
            data.update(res)

        data: dict = {}
        socorro.SuperSearch(params=params, handler=handler, handlerdata=data).wait()

        yield from data["hits"]

    def fetch_representative_processed_crash(self) -> dict:
        """Fetch a processed crash to represent the signature.

        This could fetch multiple processed crashes and return the one that is
        most likely to be useful.
        """
        limit_to_top_proto_signature = (
            self.num_top_proto_signature_crashes / self.num_crashes > 0.6
        )

        reports = itertools.chain(
            # Reports with a higher score from clouseau are more likely to be
            # useful.
            sorted(
                self._clouseau_reports,
                key=lambda report: report["max_score"],
                reverse=True,
            ),
            # Next, we try to find reports from the top crashing build because
            # they are likely to be representative.
            self._fetch_crash_reports(self.top_proto_signature, self.top_build_id),
            self._fetch_crash_reports(self.top_proto_signature, self._build_ids()),
        )
        for report in reports:
            uuid = report["uuid"]
            processed_crash = socorro.ProcessedCrash.get_processed(uuid)[uuid]
            if (
                not limit_to_top_proto_signature
                or processed_crash["proto_signature"] == self.top_proto_signature
            ):
                # TODO(investigate): maybe we should check if the stack is
                # corrupted (ask gsvelto or willkg about how to detect that)
                return processed_crash

        raise NoCrashReportFoundError(
            f"No crash report found with the most frequent proto signature for {self.signature_term}."
        )

    @cached_property
    def is_potential_security_crash(self) -> bool:
        """Whether the crash is related to a potential security bug.

        The value will be True if:
            - the signature is related to near allocator poison value crashes, or
            - one of the potential regressors is a security bug
        """
        return self.is_near_allocator_related_crash or any(
            bug.is_security for bug in self.regressed_by_potential_bugs
        )

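# Sketch (not part of the original file): a SignatureAnalyzer is normally built
# by SignaturesDataFetcher.analyze() below, which pairs each Socorro signature
# facet with its Clouseau reports and a shared BugsStore:
#
#     analyzer = SignatureAnalyzer(socorro_signature, num_total_crashes,
#                                  clouseau_reports, BugsStore())
#     analyzer.is_potential_security_crash  # True for allocator-poison-related
#                                           # crashes or security regressors
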
class SignaturesDataFetcher:
    """Fetch the data related to the given signatures."""

    MEMORY_ACCESS_ERROR_REASONS = (
        # On Windows:
        "EXCEPTION_ACCESS_VIOLATION_READ",
        "EXCEPTION_ACCESS_VIOLATION_WRITE",
        "EXCEPTION_ACCESS_VIOLATION_EXEC",
        # On Linux:
        "SIGSEGV / SEGV_MAPERR",
        "SIGSEGV / SEGV_ACCERR",
    )

    EXCLUDED_MOZ_REASON_STRINGS = (
        "MOZ_CRASH(OOM)",
        "MOZ_CRASH(Out of memory)",
        "out of memory",
        "Shutdown hanging",
        # TODO(investigate): do we need to exclude signatures whose reason
        # contains `[unhandlable oom]`?
        # Example: arena_t::InitChunk | arena_t::AllocRun | arena_t::MallocLarge | arena_t::Malloc | BaseAllocator::malloc | Allocator::malloc | PageMalloc
        # "[unhandlable oom]",
    )

    # If any of the crash reasons starts with any of the following, then it is
    # a network or I/O error.
    EXCLUDED_IO_ERROR_REASON_PREFIXES = (
        "EXCEPTION_IN_PAGE_ERROR_READ",
        "EXCEPTION_IN_PAGE_ERROR_WRITE",
        "EXCEPTION_IN_PAGE_ERROR_EXEC",
    )

    # TODO(investigate): do we need to exclude all these signature prefixes?
    EXCLUDED_SIGNATURE_PREFIXES = (
        "OOM | ",
        "bad hardware | ",
        "shutdownhang | ",
    )

    SUMMARY_DURATION = timedelta(weeks=10)

    def __init__(
        self,
        signatures: Iterable[str],
        product: str = "Firefox",
        channel: str = "nightly",
    ):
        self._signatures = set(signatures)
        self._product = product
        self._channel = channel

    @classmethod
    def find_new_actionable_crashes(
        cls,
        product: str,
        channel: str,
        days_to_check: int = 7,
        days_without_crashes: int = 7,
    ) -> "SignaturesDataFetcher":
        """Find new actionable crashes.

        Args:
            product: The product to check.
            channel: The release channel to check.
            days_to_check: The number of days to check for crashes.
            days_without_crashes: The number of days without crashes, before
                `days_to_check`, required to consider a signature new.

        Returns:
            A SignaturesDataFetcher for the new actionable signatures.
        """
        duration = days_to_check + days_without_crashes
        end_date = lmdutils.get_date_ymd("today")
        start_date = end_date - timedelta(duration)
        earliest_allowed_date = lmdutils.get_date_str(
            end_date - timedelta(days_to_check)
        )
        date_range = socorro.SuperSearch.get_search_date(start_date, end_date)

        params = {
            "product": product,
            "release_channel": channel,
            "date": date_range,
            # TODO(investigate): should we do a local filter instead of the
            # following (should we exclude the signature if one of the crashes
            # is a shutdown hang?):
            # If the `ipc_shutdown_state` or `shutdown_progress` fields are
            # non-empty then it's a shutdown hang.
            "ipc_shutdown_state": "__null__",
            "shutdown_progress": "__null__",
            # TODO(investigate): should we use the following instead of the
            # local filter.
            # "oom_allocation_size": "!__null__",
            "_aggs.signature": [
                "moz_crash_reason",
                "reason",
                "_histogram.date",
                "_cardinality.install_time",
                "_cardinality.oom_allocation_size",
            ],
            "_results_number": 0,
            "_facets_size": 10000,
        }

        def handler(search_resp: dict, data: list):
            logger.debug(
                "Total of %d signatures received from Socorro",
                len(search_resp["facets"]["signature"]),
            )

            for crash in search_resp["facets"]["signature"]:
                signature = crash["term"]
                if any(
                    signature.startswith(excluded_prefix)
                    for excluded_prefix in cls.EXCLUDED_SIGNATURE_PREFIXES
                ):
                    # Ignore signatures that start with any of the excluded prefixes.
                    continue

                facets = crash["facets"]
                installations = facets["cardinality_install_time"]["value"]
                if installations <= 1:
                    # Ignore crashes that only happen on one installation.
                    continue

                first_date = facets["histogram_date"][0]["term"]
                if first_date < earliest_allowed_date:
                    # The crash is not new, skip it.
                    continue

                if any(
                    reason["term"].startswith(io_error_prefix)
                    for reason in facets["reason"]
                    for io_error_prefix in cls.EXCLUDED_IO_ERROR_REASON_PREFIXES
                ):
                    # Ignore network or I/O error crashes.
                    continue

                if crash["count"] < 20:
                    # For signatures with low volume, having multiple types of
                    # memory errors indicates potential bad hardware crashes.
                    num_memory_error_types = sum(
                        reason["term"] in cls.MEMORY_ACCESS_ERROR_REASONS
                        for reason in facets["reason"]
                    )
                    if num_memory_error_types > 1:
                        # Potential bad hardware crash, skip it.
                        continue

                # TODO: Add a filter using the `possible_bit_flips_max_confidence`
                # field to exclude bad hardware crashes. The field is not available yet.
                # See: https://bugzilla.mozilla.org/show_bug.cgi?id=1816669#c3

                # TODO(investigate): is this needed since we are already
                # filtering signatures that start with "OOM | "?
                if facets["cardinality_oom_allocation_size"]["value"]:
                    # If one of the crashes is an OOM crash, skip it.
                    continue

                # TODO(investigate): do we need to check for the `moz_crash_reason`?
                moz_crash_reasons = facets["moz_crash_reason"]
                if moz_crash_reasons and any(
                    excluded_reason in reason["term"]
                    for reason in moz_crash_reasons
                    for excluded_reason in cls.EXCLUDED_MOZ_REASON_STRINGS
                ):
                    continue

                data.append(signature)

        signatures: list = []
        socorro.SuperSearch(
            params=params,
            handler=handler,
            handlerdata=signatures,
        ).wait()

        logger.debug(
            "Total of %d signatures left after applying the filtering criteria",
            len(signatures),
        )

        return cls(signatures, product, channel)

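    # Usage sketch (not part of the original module): the classmethod above is
    # the usual entry point; it returns a fetcher primed with the new, filtered
    # signatures for the given product and channel:
    #
    #     fetcher = SignaturesDataFetcher.find_new_actionable_crashes("Firefox", "nightly")
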
    def fetch_clouseau_crash_reports(self) -> dict[str, list]:
        """Fetch the crash reports data from Crash Clouseau."""
        if not self._signatures:
            return {}

        logger.debug(
            "Fetch from Clouseau: requesting reports for %d signatures",
            len(self._signatures),
        )

        signature_reports = clouseau.Reports.get_by_signatures(
            self._signatures,
            product=self._product,
            channel=self._channel,
        )

        logger.debug(
            "Fetch from Clouseau: received reports for %d signatures",
            len(signature_reports),
        )

        return signature_reports

    def fetch_socorro_info(self) -> tuple[list[dict], int]:
        """Fetch the signature data from Socorro."""
        if not self._signatures:
            return [], 0

        end_date = lmdutils.get_date_ymd("today")
        start_date = end_date - self.SUMMARY_DURATION
        date_range = socorro.SuperSearch.get_search_date(start_date, end_date)

        params = {
            "product": self._product,
            # TODO(investigate): should we include all release channels?
            "release_channel": self._channel,
            # TODO(investigate): should we limit based on the build date as well?
            "date": date_range,
            # TODO: split signatures into chunks to avoid very long query URLs
            "signature": ["=" + signature for signature in self._signatures],
            "_aggs.signature": [
                "address",
                "build_id",
                "cpu_arch",
                "proto_signature",
                "_cardinality.user_comments",
                "cpu_arch",
                "platform_pretty_version",
                "_histogram.date",
                # The following are needed for SignatureStats:
                "platform",
                "is_garbage_collecting",
                "_cardinality.install_time",
                "startup_crash",
                "_histogram.uptime",
                "process_type",
            ],
            "_results_number": 0,
            "_facets_size": 10000,
        }

        def handler(search_results: dict, data: dict):
            data["num_total_crashes"] = search_results["total"]
            data["signatures"] = search_results["facets"]["signature"]

        logger.debug(
            "Fetch from Socorro: requesting info for %d signatures",
            len(self._signatures),
        )

        data: dict = {}
        socorro.SuperSearchUnredacted(
            params=params,
            handler=handler,
            handlerdata=data,
        ).wait()

        logger.debug(
            "Fetch from Socorro: received info for %d signatures",
            len(data["signatures"]),
        )

        return data["signatures"], data["num_total_crashes"]

    def fetch_bugs(
        self, include_fields: list[str] | None = None
    ) -> dict[str, list[dict]]:
        """Fetch bugs that are filed against the given signatures."""
        if not self._signatures:
            return {}

        params_base: dict = {
            "include_fields": [
                "cf_crash_signature",
            ],
        }

        if include_fields:
            params_base["include_fields"].extend(include_fields)

        params_list = []
        for signatures_chunk in Connection.chunks(list(self._signatures), 30):
            params = params_base.copy()
            n = int(utils.get_last_field_num(params))
            params[f"f{n}"] = "OP"
            params[f"j{n}"] = "OR"
            for signature in signatures_chunk:
                n += 1
                params[f"f{n}"] = "cf_crash_signature"
                params[f"o{n}"] = "regexp"
                params[f"v{n}"] = rf"\[(@ |@){re.escape(signature)}( \]|\])"
            params[f"f{n+1}"] = "CP"
            params_list.append(params)

        signatures_bugs: dict = defaultdict(list)

        def handler(res, data):
            for bug in res["bugs"]:
                for signature in utils.get_signatures(bug["cf_crash_signature"]):
                    if signature in self._signatures:
                        data[signature].append(bug)

        logger.debug(
            "Fetch from Bugzilla: requesting bugs for %d signatures",
            len(self._signatures),
        )
        timeout = utils.get_config("common", "bz_query_timeout")
        Bugzilla(
            timeout=timeout,
            queries=[
                connection.Query(Bugzilla.API_URL, params, handler, signatures_bugs)
                for params in params_list
            ],
        ).wait()

        logger.debug(
            "Fetch from Bugzilla: received bugs for %d signatures", len(signatures_bugs)
        )

        return signatures_bugs

    def analyze(self) -> list[SignatureAnalyzer]:
        """Analyze the data related to the signatures."""
        bugs = self.fetch_bugs()
        # TODO(investigate): For now, we are ignoring signatures that have bugs
        # filed against them, even if those bugs were closed a long time ago. We
        # should investigate whether we should include the ones with closed
        # bugs. For example, if the bug was closed as Fixed years ago.
        self._signatures.difference_update(bugs.keys())

        clouseau_reports = self.fetch_clouseau_crash_reports()
        # TODO(investigate): For now, we are ignoring signatures that are not
        # analyzed by clouseau. We should investigate why they are not analyzed
        # and whether we should include them.
        self._signatures.intersection_update(clouseau_reports.keys())

        signatures, num_total_crashes = self.fetch_socorro_info()
        bugs_store = BugsStore()

        return [
            SignatureAnalyzer(
                signature,
                num_total_crashes,
                clouseau_reports[signature["term"]],
                bugs_store,
            )
            for signature in signatures
        ]
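
# A minimal end-to-end sketch (not part of the original module). Running it
# performs live queries against Socorro, Crash Clouseau, and Bugzilla, so it is
# only meant to illustrate how the pieces above fit together.
if __name__ == "__main__":
    fetcher = SignaturesDataFetcher.find_new_actionable_crashes("Firefox", "nightly")
    for analyzer in fetcher.analyze():
        print(
            analyzer.signature_term,
            analyzer.num_crashes,
            analyzer.crash_component,
            analyzer.regressed_by,
        )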