
mozilla / mozregression / build 14641798044

24 Apr 2025 12:38PM UTC. Coverage: 87.386% (first build).

Pull Request #1983: Bug 1763188 - Add Snap support using TC builds
Merge da7e48128 into 807564865 (github / web-flow)

59 of 132 new or added lines in 4 files covered (44.7%)
2577 of 2949 relevant lines covered (87.39%)
8.54 hits per line

Source File

/mozregression/main.py (89.64% covered)
"""
Entry point for the mozregression command line.
"""

from __future__ import absolute_import

import atexit
import os
import shlex
import sys

import colorama
import mozfile
import requests
from mozlog import get_proxy_logger
from requests.exceptions import HTTPError, RequestException

from mozregression import __version__
from mozregression.approx_persist import ApproxPersistChooser
from mozregression.bisector import Bisection, Bisector, IntegrationHandler, NightlyHandler
from mozregression.bugzilla import bug_url, find_bugids_in_push
from mozregression.cli import cli
from mozregression.config import DEFAULT_EXPAND, TC_CREDENTIALS_FNAME
from mozregression.download_manager import BuildDownloadManager
from mozregression.errors import GoodBadExpectationError, MozRegressionError
from mozregression.fetch_build_info import IntegrationInfoFetcher, NightlyInfoFetcher
from mozregression.json_pushes import JsonPushes
from mozregression.launchers import REGISTRY as APP_REGISTRY
from mozregression.network import set_http_session
from mozregression.persist_limit import PersistLimit
from mozregression.telemetry import UsageMetrics, get_system_info, send_telemetry_ping_oop
from mozregression.tempdir import safe_mkdtemp
from mozregression.test_runner import CommandTestRunner, ManualTestRunner

LOG = get_proxy_logger("main")


class Application(object):
    def __init__(self, fetch_config, options):
        self.fetch_config = fetch_config
        self.options = options
        self._test_runner = None
        self._bisector = None
        self._build_download_manager = None
        self._download_dir = options.persist
        self._rm_download_dir = False
        if not options.persist:
            self._download_dir = safe_mkdtemp()
            self._rm_download_dir = True
        launcher_class = APP_REGISTRY.get(fetch_config.app_name)
        launcher_class.check_is_runnable()
        # init global profile if required
        self._global_profile = None
        if options.profile_persistence in ("clone-first", "reuse"):
            self._global_profile = launcher_class.create_profile(
                profile=options.profile,
                addons=options.addons,
                preferences=options.preferences,
                clone=options.profile_persistence == "clone-first",
            )
            options.cmdargs = options.cmdargs + ["--allow-downgrade"]
        elif options.profile:
            options.cmdargs = options.cmdargs + ["--allow-downgrade"]

    def clear(self):
        if self._build_download_manager:
            # cancel all possible downloads
            self._build_download_manager.cancel()
        if self._rm_download_dir:
            if self._build_download_manager:
                # we need to wait explicitly for downloading threads completion
                # here because it may remove a file in the download dir - and
                # in that case we could end up with a race condition when
                # we will remove the download dir. See
                # https://bugzilla.mozilla.org/show_bug.cgi?id=1231745
                self._build_download_manager.wait(raise_if_error=False)
            mozfile.remove(self._download_dir)
        if self._global_profile and self.options.profile_persistence == "clone-first":
            self._global_profile.cleanup()

    @property
    def test_runner(self):
        if self._test_runner is None:
            if self.options.command is None:
                self._test_runner = ManualTestRunner(
                    launcher_kwargs=dict(
                        addons=self.options.addons,
                        profile=self._global_profile or self.options.profile,
                        cmdargs=self.options.cmdargs,
                        preferences=self.options.preferences,
                        adb_profile_dir=self.options.adb_profile_dir,
                        allow_sudo=self.options.allow_sudo,
                        disable_snap_connect=self.options.disable_snap_connect,
                    )
                )
            else:
                self._test_runner = CommandTestRunner(self.options.command)
        return self._test_runner

    @property
    def bisector(self):
        if self._bisector is None:
            self._bisector = Bisector(
                self.fetch_config,
                self.test_runner,
                self.build_download_manager,
                dl_in_background=self.options.background_dl,
                approx_chooser=(
                    None if self.options.approx_policy != "auto" else ApproxPersistChooser(7)
                ),
            )
        return self._bisector

    @property
    def build_download_manager(self):
        if self._build_download_manager is None:
            background_dl_policy = self.options.background_dl_policy
            if not self.options.persist:
                # cancel background downloads forced
                background_dl_policy = "cancel"
            self._build_download_manager = BuildDownloadManager(
                self._download_dir,
                background_dl_policy=background_dl_policy,
                persist_limit=PersistLimit(self.options.persist_size_limit),
            )
        return self._build_download_manager

    def bisect_nightlies(self):
        good_date, bad_date = self.options.good, self.options.bad
        handler = NightlyHandler(
            find_fix=self.options.find_fix,
            ensure_good_and_bad=self.options.mode != "no-first-check",
        )
        result = self._do_bisect(handler, good_date, bad_date)
        if result == Bisection.FINISHED:
            LOG.info("Got as far as we can go bisecting nightlies...")
            handler.print_range()
            if self.fetch_config.can_go_integration():
                LOG.info("Switching bisection method to taskcluster")
                self.fetch_config.set_repo(self.fetch_config.get_nightly_repo(handler.bad_date))
                return self._bisect_integration(
                    handler.good_revision, handler.bad_revision, expand=DEFAULT_EXPAND
                )
        elif result == Bisection.USER_EXIT:
            self._print_resume_info(handler)
        else:
            # NO_DATA
            LOG.info(
                "Unable to get valid builds within the given"
                " range. You should try to launch mozregression"
                " again with a larger date range."
            )
            return 1
        return 0

    def bisect_integration(self):
        return self._bisect_integration(
            self.options.good,
            self.options.bad,
            ensure_good_and_bad=self.options.mode != "no-first-check",
        )

    def _bisect_integration(self, good_rev, bad_rev, ensure_good_and_bad=False, expand=0):
        LOG.info(
            "Getting %s builds between %s and %s"
            % (self.fetch_config.integration_branch, good_rev, bad_rev)
        )
        handler = IntegrationHandler(
            find_fix=self.options.find_fix, ensure_good_and_bad=ensure_good_and_bad
        )
        result = self._do_bisect(handler, good_rev, bad_rev, expand=expand)
        if result == Bisection.FINISHED:
            LOG.info("No more integration revisions, bisection finished.")
            handler.print_range()
            if handler.good_revision == handler.bad_revision:
                LOG.warning(
                    "It seems that you used two changesets that are in"
                    " the same push. Check the pushlog url."
                )
            elif len(handler.build_range) == 2:
                # range reduced to 2 pushes (at least ones with builds):
                # one good, one bad.
                result = handler.handle_merge()
                if result:
                    branch, good_rev, bad_rev = result
                    self.fetch_config.set_repo(branch)
                    return self._bisect_integration(good_rev, bad_rev, expand=DEFAULT_EXPAND)
                else:
                    # This code is broken, it prints out the message even when
                    # there are multiple bug numbers or commits in the range.
                    # Somebody should fix it before re-enabling it.
                    return 0
                    # print a bug if:
                    # (1) there really is only one bad push (and we're not
                    # just missing the builds for some intermediate builds)
                    # (2) there is only one bug number in that push
                    jp = JsonPushes(handler.build_range[1].repo_name)
                    num_pushes = len(
                        jp.pushes_within_changes(
                            handler.build_range[0].changeset,
                            handler.build_range[1].changeset,
                        )
                    )
                    if num_pushes == 2:
                        bugids = find_bugids_in_push(
                            handler.build_range[1].repo_name,
                            handler.build_range[1].changeset,
                        )
                        if len(bugids) == 1:
                            word = "fix" if handler.find_fix else "regression"
                            LOG.info(
                                "Looks like the following bug has the "
                                " changes which introduced the"
                                " {}:\n{}".format(word, bug_url(bugids[0]))
                            )
        elif result == Bisection.USER_EXIT:
            self._print_resume_info(handler)
        else:
            # NO_DATA. With integration branches, this can not happen if changesets
            # are incorrect - so builds are probably too old
            LOG.info(
                "There are no build artifacts for these changesets (they are probably too old)."
            )
            return 1
        return 0

    def _do_bisect(self, handler, good, bad, **kwargs):
        try:
            return self.bisector.bisect(handler, good, bad, **kwargs)
        except (KeyboardInterrupt, MozRegressionError, RequestException) as exc:
            if (
                handler.good_revision is not None
                and handler.bad_revision is not None
                and not isinstance(exc, GoodBadExpectationError)
            ):
                atexit.register(self._on_exit_print_resume_info, handler)
            raise

    def _print_resume_info(self, handler):
        # copy sys.argv, remove every --good/--bad/--repo related argument,
        # then add our own
        argv = sys.argv[:]
        args = ("--good", "--bad", "-g", "-b", "--good-rev", "--bad-rev", "--repo")
        indexes_to_remove = []
        for i, arg in enumerate(argv):
            if i in indexes_to_remove:
                continue
            for karg in args:
                if karg == arg:
                    # handle '--good 2015-01-01'
                    indexes_to_remove.extend((i, i + 1))
                    break
                elif arg.startswith(karg + "="):
                    # handle '--good=2015-01-01'
                    indexes_to_remove.append(i)
                    break
        for i in reversed(indexes_to_remove):
            del argv[i]

        argv.append("--repo=%s" % handler.build_range[0].repo_name)

        if hasattr(handler, "good_date"):
            argv.append("--good=%s" % handler.good_date)
            argv.append("--bad=%s" % handler.bad_date)
        else:
            argv.append("--good=%s" % handler.good_revision)
            argv.append("--bad=%s" % handler.bad_revision)

        LOG.info("To resume, run:")
        LOG.info(" ".join([shlex.quote(arg) for arg in argv]))

    def _on_exit_print_resume_info(self, handler):
        handler.print_range()
        self._print_resume_info(handler)

    def _launch(self, fetcher_class):
        fetcher = fetcher_class(self.fetch_config)
        build_info = fetcher.find_build_info(self.options.launch)
        self.build_download_manager.focus_download(build_info)
        self.test_runner.run_once(build_info)

    def launch_nightlies(self):
        self._launch(NightlyInfoFetcher)

    def launch_integration(self):
        self._launch(IntegrationInfoFetcher)


def pypi_latest_version():
    url = "https://pypi.python.org/pypi/mozregression/json"
    return requests.get(url, timeout=10).json()["info"]["version"]


def check_mozregression_version():
    try:
        mozregression_version = pypi_latest_version()
    except (RequestException, KeyError, ValueError):
        LOG.critical("Unable to get latest version from pypi.")
        return

    if __version__ != mozregression_version:
        LOG.warning(
            "You are using mozregression version %s, "
            "however version %s is available." % (__version__, mozregression_version)
        )

        LOG.warning(
            "You should consider upgrading via the 'pip install"
            " --upgrade mozregression' command."
        )


def main(
    argv=None,
    namespace=None,
    check_new_version=True,
    mozregression_variant="console",
):
    """
    main entry point of mozregression command line.
    """
    # terminal color support on windows
    if os.name == "nt":
        colorama.init()

    config, app = None, None
    try:
        config = cli(argv=argv, namespace=namespace)
        if check_new_version:
            check_mozregression_version()
        config.validate()
        set_http_session(get_defaults={"timeout": config.options.http_timeout})

        app = Application(config.fetch_config, config.options)
        send_telemetry_ping_oop(
            UsageMetrics(
                variant=mozregression_variant,
                appname=config.fetch_config.app_name,
                build_type=config.fetch_config.build_type,
                good=config.options.good,
                bad=config.options.bad,
                launch=config.options.launch,
                **get_system_info(),
            ),
            config.enable_telemetry,
        )

        method = getattr(app, config.action)
        sys.exit(method())

    except KeyboardInterrupt:
        sys.exit("\nInterrupted.")
    except (MozRegressionError, RequestException) as exc:
        if isinstance(exc, HTTPError) and exc.response.status_code == 401:
            # remove the taskcluster credential file - looks like it's wrong
            # anyway. This will force mozregression to ask again next time.
            mozfile.remove(TC_CREDENTIALS_FNAME)
        LOG.error(str(exc)) if config else sys.exit(str(exc))
        sys.exit(1)
    finally:
        if app:
            app.clear()


if __name__ == "__main__":
    main()
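
For reference, main() above is the command-line entry point described in the module docstring. A minimal sketch of driving it programmatically with an explicit argv, rather than via the installed command; the dates are illustrative placeholders and the flags mirror the --good/--bad arguments handled in _print_resume_info:

    # Minimal sketch: invoke the entry point with an explicit argument list.
    # check_new_version=False skips the PyPI version check; main() terminates
    # the process through sys.exit() when the chosen action finishes.
    from mozregression.main import main

    main(
        argv=["--good", "2024-01-01", "--bad", "2024-02-01"],
        check_new_version=False,
    )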