mozilla / mozregression, build 9713240652
28 Jun 2024 01:02PM UTC. Coverage: 89.418% (first build)

Pull Request #1724: Bug 1899515 - Inform users of maybe missing AppArmor rules
Merge 4000cfcb7 into 03a97847e (committed via GitHub, web-flow)

13 of 44 new or added lines in 2 files covered (29.55%)
2535 of 2835 relevant lines covered (89.42%)
13.43 hits per line

Source file: /mozregression/main.py (87.55% of lines covered)
"""
Entry point for the mozregression command line.
"""

from __future__ import absolute_import

import atexit
import os
import pipes
import sys

import colorama
import mozfile
import requests
from mozlog import get_proxy_logger
from requests.exceptions import HTTPError, RequestException

from mozregression import __version__
from mozregression.approx_persist import ApproxPersistChooser
from mozregression.bisector import Bisection, Bisector, IntegrationHandler, NightlyHandler
from mozregression.bugzilla import bug_url, find_bugids_in_push
from mozregression.cli import cli
from mozregression.config import DEFAULT_EXPAND, TC_CREDENTIALS_FNAME
from mozregression.download_manager import BuildDownloadManager
from mozregression.errors import GoodBadExpectationError, MozRegressionError
from mozregression.fetch_build_info import IntegrationInfoFetcher, NightlyInfoFetcher
from mozregression.json_pushes import JsonPushes
from mozregression.launchers import REGISTRY as APP_REGISTRY
from mozregression.network import set_http_session
from mozregression.persist_limit import PersistLimit
from mozregression.telemetry import UsageMetrics, get_system_info, send_telemetry_ping_oop
from mozregression.tempdir import safe_mkdtemp
from mozregression.test_runner import CommandTestRunner, ManualTestRunner

LOG = get_proxy_logger("main")


class Application(object):
    def __init__(self, fetch_config, options):
        self.fetch_config = fetch_config
        self.options = options
        self._test_runner = None
        self._bisector = None
        self._build_download_manager = None
        self._download_dir = options.persist
        self._rm_download_dir = False
        if not options.persist:
            self._download_dir = safe_mkdtemp()
            self._rm_download_dir = True
        launcher_class = APP_REGISTRY.get(fetch_config.app_name)
        launcher_class.check_is_runnable()
        # init global profile if required
        self._global_profile = None
        if options.profile_persistence in ("clone-first", "reuse"):
            self._global_profile = launcher_class.create_profile(
                profile=options.profile,
                addons=options.addons,
                preferences=options.preferences,
                clone=options.profile_persistence == "clone-first",
            )
            options.cmdargs = options.cmdargs + ["--allow-downgrade"]
        elif options.profile:
            options.cmdargs = options.cmdargs + ["--allow-downgrade"]

    def clear(self):
        if self._build_download_manager:
            # cancel all possible downloads
            self._build_download_manager.cancel()
        if self._rm_download_dir:
            if self._build_download_manager:
                # we need to wait explicitly for downloading threads completion
                # here because it may remove a file in the download dir - and
                # in that case we could end up with a race condition when
                # we will remove the download dir. See
                # https://bugzilla.mozilla.org/show_bug.cgi?id=1231745
                self._build_download_manager.wait(raise_if_error=False)
            mozfile.remove(self._download_dir)
        if self._global_profile and self.options.profile_persistence == "clone-first":
            self._global_profile.cleanup()

    @property
    def test_runner(self):
        if self._test_runner is None:
            if self.options.command is None:
                self._test_runner = ManualTestRunner(
                    launcher_kwargs=dict(
                        addons=self.options.addons,
                        profile=self._global_profile or self.options.profile,
                        cmdargs=self.options.cmdargs,
                        preferences=self.options.preferences,
                        adb_profile_dir=self.options.adb_profile_dir,
                    )
                )
            else:
                self._test_runner = CommandTestRunner(self.options.command)
        return self._test_runner

    @property
    def bisector(self):
        if self._bisector is None:
            self._bisector = Bisector(
                self.fetch_config,
                self.test_runner,
                self.build_download_manager,
                dl_in_background=self.options.background_dl,
                approx_chooser=(
                    None if self.options.approx_policy != "auto" else ApproxPersistChooser(7)
                ),
            )
        return self._bisector

    @property
    def build_download_manager(self):
        if self._build_download_manager is None:
            background_dl_policy = self.options.background_dl_policy
            if not self.options.persist:
                # cancel background downloads forced
                background_dl_policy = "cancel"
            self._build_download_manager = BuildDownloadManager(
                self._download_dir,
                background_dl_policy=background_dl_policy,
                persist_limit=PersistLimit(self.options.persist_size_limit),
            )
        return self._build_download_manager

    def bisect_nightlies(self):
        good_date, bad_date = self.options.good, self.options.bad
        handler = NightlyHandler(
            find_fix=self.options.find_fix,
            ensure_good_and_bad=self.options.mode != "no-first-check",
        )
        result = self._do_bisect(handler, good_date, bad_date)
        if result == Bisection.FINISHED:
            LOG.info("Got as far as we can go bisecting nightlies...")
            handler.print_range()
            if self.fetch_config.can_go_integration():
                LOG.info("Switching bisection method to taskcluster")
                self.fetch_config.set_repo(self.fetch_config.get_nightly_repo(handler.bad_date))
                return self._bisect_integration(
                    handler.good_revision, handler.bad_revision, expand=DEFAULT_EXPAND
                )
        elif result == Bisection.USER_EXIT:
            self._print_resume_info(handler)
        else:
            # NO_DATA
            LOG.info(
                "Unable to get valid builds within the given"
                " range. You should try to launch mozregression"
                " again with a larger date range."
            )
            return 1
        return 0

    def bisect_integration(self):
        return self._bisect_integration(
            self.options.good,
            self.options.bad,
            ensure_good_and_bad=self.options.mode != "no-first-check",
        )

    def _bisect_integration(self, good_rev, bad_rev, ensure_good_and_bad=False, expand=0):
        LOG.info(
            "Getting %s builds between %s and %s"
            % (self.fetch_config.integration_branch, good_rev, bad_rev)
        )
        handler = IntegrationHandler(
            find_fix=self.options.find_fix, ensure_good_and_bad=ensure_good_and_bad
        )
        result = self._do_bisect(handler, good_rev, bad_rev, expand=expand)
        if result == Bisection.FINISHED:
            LOG.info("No more integration revisions, bisection finished.")
            handler.print_range()
            if handler.good_revision == handler.bad_revision:
                LOG.warning(
                    "It seems that you used two changesets that are in"
                    " the same push. Check the pushlog url."
                )
            elif len(handler.build_range) == 2:
                # range reduced to 2 pushes (at least ones with builds):
                # one good, one bad.
                result = handler.handle_merge()
                if result:
                    branch, good_rev, bad_rev = result
                    self.fetch_config.set_repo(branch)
                    return self._bisect_integration(good_rev, bad_rev, expand=DEFAULT_EXPAND)
                else:
                    # This code is broken, it prints out the message even when
                    # there are multiple bug numbers or commits in the range.
                    # Somebody should fix it before re-enabling it.
                    return 0
                    # print a bug if:
                    # (1) there really is only one bad push (and we're not
                    # just missing the builds for some intermediate builds)
                    # (2) there is only one bug number in that push
                    jp = JsonPushes(handler.build_range[1].repo_name)
                    num_pushes = len(
                        jp.pushes_within_changes(
                            handler.build_range[0].changeset,
                            handler.build_range[1].changeset,
                        )
                    )
                    if num_pushes == 2:
                        bugids = find_bugids_in_push(
                            handler.build_range[1].repo_name,
                            handler.build_range[1].changeset,
                        )
                        if len(bugids) == 1:
                            word = "fix" if handler.find_fix else "regression"
                            LOG.info(
                                "Looks like the following bug has the "
                                " changes which introduced the"
                                " {}:\n{}".format(word, bug_url(bugids[0]))
                            )
        elif result == Bisection.USER_EXIT:
            self._print_resume_info(handler)
        else:
            # NO_DATA. With integration branches, this can not happen if changesets
            # are incorrect - so builds are probably too old
            LOG.info(
                "There are no build artifacts for these changesets (they are probably too old)."
            )
            return 1
        return 0

    def _do_bisect(self, handler, good, bad, **kwargs):
        try:
            return self.bisector.bisect(handler, good, bad, **kwargs)
        except (KeyboardInterrupt, MozRegressionError, RequestException) as exc:
            if (
                handler.good_revision is not None
                and handler.bad_revision is not None
                and not isinstance(exc, GoodBadExpectationError)
            ):
                atexit.register(self._on_exit_print_resume_info, handler)
            raise

    def _print_resume_info(self, handler):
        # copy sys.argv, remove every --good/--bad/--repo related argument,
        # then add our own
        argv = sys.argv[:]
        args = ("--good", "--bad", "-g", "-b", "--good-rev", "--bad-rev", "--repo")
        indexes_to_remove = []
        for i, arg in enumerate(argv):
            if i in indexes_to_remove:
                continue
            for karg in args:
                if karg == arg:
                    # handle '--good 2015-01-01'
                    indexes_to_remove.extend((i, i + 1))
                    break
                elif arg.startswith(karg + "="):
                    # handle '--good=2015-01-01'
                    indexes_to_remove.append(i)
                    break
        for i in reversed(indexes_to_remove):
            del argv[i]

        argv.append("--repo=%s" % handler.build_range[0].repo_name)

        if hasattr(handler, "good_date"):
            argv.append("--good=%s" % handler.good_date)
            argv.append("--bad=%s" % handler.bad_date)
        else:
            argv.append("--good=%s" % handler.good_revision)
            argv.append("--bad=%s" % handler.bad_revision)

        LOG.info("To resume, run:")
        LOG.info(" ".join([pipes.quote(arg) for arg in argv]))

    def _on_exit_print_resume_info(self, handler):
        handler.print_range()
        self._print_resume_info(handler)

    def _launch(self, fetcher_class):
        fetcher = fetcher_class(self.fetch_config)
        build_info = fetcher.find_build_info(self.options.launch)
        self.build_download_manager.focus_download(build_info)
        self.test_runner.run_once(build_info)

    def launch_nightlies(self):
        self._launch(NightlyInfoFetcher)

    def launch_integration(self):
        self._launch(IntegrationInfoFetcher)


def pypi_latest_version():
    url = "https://pypi.python.org/pypi/mozregression/json"
    return requests.get(url, timeout=10).json()["info"]["version"]


def check_mozregression_version():
    try:
        mozregression_version = pypi_latest_version()
    except (RequestException, KeyError, ValueError):
        LOG.critical("Unable to get latest version from pypi.")
        return

    if __version__ != mozregression_version:
        LOG.warning(
            "You are using mozregression version %s, "
            "however version %s is available." % (__version__, mozregression_version)
        )

        LOG.warning(
            "You should consider upgrading via the 'pip install"
            " --upgrade mozregression' command."
        )


def check_unprivileged_userns():
    """
    Some distributions have started to block unprivileged user namespaces via
    AppArmor.  This might result in crashes on older builds, and in degraded
    sandbox behavior.  It is fixed with an AppArmor profile that allows the
    syscall to proceed, but the profile is path dependent on the binary we
    download and needs to be installed at a system level, so we can only
    advise people of the situation.

    The following sys entry should be enough to verify whether it is blocked or
    not, but the Ubuntu security team recommends cross-checking with an actual
    syscall.  This code is a simplification of how Firefox does it, cf.
    https://searchfox.org/mozilla-central/rev/23efe2c8c5b3a3182d449211ff9036fb34fe0219/security/sandbox/linux/SandboxInfo.cpp#114-175
    and has been the most reliable way so far (a shell with unshare would not
    reproduce EPERM like we want).
    """

    apparmor_file = "/proc/sys/kernel/apparmor_restrict_unprivileged_userns"
    if not os.path.isfile(apparmor_file):
        return

    with open(apparmor_file, "r") as f:
        if f.read().strip() != "1":
            return

    import ctypes
    import errno
    import platform
    import signal

    # Values are from
    # https://github.com/hrw/syscalls-table/tree/163e238e4d7761fcf6ac500aad92d53ac88d663a/system_calls/tables
    # imported from linux kernel headers
    SYS_clone = {
        "i386": 120,
        "x32": 1073741880,
        "x86_64": 56,
        "arm": 120,
        "armv7l": 120,
        "arm64": 220,
        "aarch64": 220,
        "aarch64_be": 220,
        "armv8b": 220,
        "armv8l": 220,
    }.get(platform.machine())
    if not SYS_clone:
        LOG.warning(
            "Unprivileged user namespaces might be disabled, but unsupported platform? {}".format(
                platform.machine()
            )
        )
        return

    libc = ctypes.CDLL(None, use_errno=True)

    LOG.warning(
        "Unprivileged user namespaces might be disabled. Checking clone() + unshare() syscalls ..."
    )

    try:
        # Introduced in 3.12 which is the version of Ubuntu 24.04
        clone_newuser = os.CLONE_NEWUSER
        clone_newpid = os.CLONE_NEWPID
    except AttributeError:
        # From
        # https://github.com/torvalds/linux/blob/5bbd9b249880dba032bffa002dd9cd12cd5af09c/include/uapi/linux/sched.h#L31-L32
        # Last change 12 years ago, so it should be a stable fallback
        clone_newuser = 0x10000000
        clone_newpid = 0x20000000

    pid = libc.syscall(SYS_clone, signal.SIGCHLD.value | clone_newuser, None, None, None, None)

    if pid == 0:
        # Child side ...
        rv = libc.unshare(clone_newpid)
        _errno = ctypes.get_errno()
        if rv < 0:
            sys.exit(_errno)
        sys.exit(0)
    else:
        (pid, statuscode) = os.waitpid(pid, 0)
        exitcode = os.waitstatus_to_exitcode(statuscode)

        if exitcode == 0:
            return

        if exitcode == errno.EPERM:
            LOG.warning(
                "Unprivileged user namespaces are disabled. This is likely because AppArmor policy "
                "change. Please refer to {} to learn how to setup AppArmor so that MozRegression "
                "works correctly. Missing AppArmor profile can lead to crashes or to incorrectly "
                "sandboxed processes.".format(
                    "https://mozilla.github.io/mozregression/documentation/usage.html#unprivileged-user-namespaces"  # noqa: E501
                )
            )
            return

        LOG.warning(
            "Unexpected exit code {} while performing user namespace "
            "checks. You might want to file a bug.".format(exitcode)
        )


def main(
    argv=None,
    namespace=None,
    check_new_version=True,
    mozregression_variant="console",
):
    """
    main entry point of mozregression command line.
    """
    # terminal color support on windows
    if os.name == "nt":
        colorama.init()

    config, app = None, None
    try:
        config = cli(argv=argv, namespace=namespace)
        if check_new_version:
            check_mozregression_version()
        config.validate()
        if (
            sys.platform
            in (
                "linux",
                "linux2",
            )
            and not config.options.dont_check_userns
        ):
            check_unprivileged_userns()
        set_http_session(get_defaults={"timeout": config.options.http_timeout})

        app = Application(config.fetch_config, config.options)
        send_telemetry_ping_oop(
            UsageMetrics(
                variant=mozregression_variant,
                appname=config.fetch_config.app_name,
                build_type=config.fetch_config.build_type,
                good=config.options.good,
                bad=config.options.bad,
                launch=config.options.launch,
                **get_system_info(),
            ),
            config.enable_telemetry,
        )

        method = getattr(app, config.action)
        sys.exit(method())

    except KeyboardInterrupt:
        sys.exit("\nInterrupted.")
    except (MozRegressionError, RequestException) as exc:
        if isinstance(exc, HTTPError) and exc.response.status_code == 401:
            # remove the taskcluster credential file - looks like it's wrong
            # anyway. This will force mozregression to ask again next time.
            mozfile.remove(TC_CREDENTIALS_FNAME)
        LOG.error(str(exc)) if config else sys.exit(str(exc))
        sys.exit(1)
    finally:
        if app:
            app.clear()


if __name__ == "__main__":
    main()