
mozilla / mozregression / build 11572196313

29 Oct 2024 10:42AM CUT, coverage: 35.046% (first build)

Pull Request #1450: Bug 1763188 - Add Snap support using TC builds
Merge 8994ec23b into f558f7daa (github / web-flow)

65 of 209 new or added lines in 8 files covered (31.1%)
1057 of 3016 relevant lines covered (35.05%)
1.05 hits per line

Source file: /mozregression/bisector.py (file coverage: 22.83%)

from __future__ import absolute_import

import math
import os
import threading
from abc import ABCMeta, abstractmethod

from mozlog import get_proxy_logger

from mozregression.branches import find_branch_in_merge_commit, get_name
from mozregression.build_range import get_integration_range, get_nightly_range
from mozregression.dates import to_datetime
from mozregression.errors import (
    EmptyPushlogError,
    GoodBadExpectationError,
    LauncherError,
    MozRegressionError,
)
from mozregression.history import BisectionHistory
from mozregression.json_pushes import JsonPushes

LOG = get_proxy_logger("Bisector")


def compute_steps_left(steps):
    if steps <= 1:
        return 0
    return math.trunc(math.log(steps, 2))
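
# Rough estimate only: for example, compute_steps_left(8) == 3, i.e. about
# three more halvings are needed to narrow 8 remaining candidate builds (or
# days) down to a single one.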


class BisectorHandler(metaclass=ABCMeta):
    """
    React to events of a :class:`Bisector`. This is intended to be subclassed.

    A BisectorHandler keeps the state of the current bisection process.
    """

    def __init__(self, find_fix=False, ensure_good_and_bad=False):
        self.find_fix = find_fix
        self.ensure_good_and_bad = ensure_good_and_bad
        self.found_repo = None
        self.build_range = None
        self.good_revision = None
        self.bad_revision = None

    def set_build_range(self, build_range):
        """
        Save a reference to the :class:`mozregression.build_range.BuildData`
        instance.

        This is called by the bisector before each step of the bisection
        process.
        """
        self.build_range = build_range

    @abstractmethod
    def _print_progress(self, new_data):
        """
        Log the current state of the bisection process.
        """
        raise NotImplementedError

    def _reverse_if_find_fix(self, var1, var2):
        return (var1, var2) if not self.find_fix else (var2, var1)

    def initialize(self):
        """
        Initialize some data at the beginning of each step of a bisection
        process.

        This will only be called if there is some build data.
        """
        # these values could be missing for old integration builds
        # until we have tried the builds
        repo = self.build_range[-1].repo_url
        if repo is not None:
            # do not update the repo if we can't find it now,
            # else we may override a previously defined one
            self.found_repo = repo
        self.good_revision, self.bad_revision = self._reverse_if_find_fix(
            self.build_range[0].changeset, self.build_range[-1].changeset
        )

    def get_pushlog_url(self):
        first_rev, last_rev = self.get_range()
        if first_rev == last_rev:
            return "%s/pushloghtml?changeset=%s" % (self.found_repo, first_rev)
        return "%s/pushloghtml?fromchange=%s&tochange=%s" % (
            self.found_repo,
            first_rev,
            last_rev,
        )
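
    # Illustrative example (the repository URL is a placeholder): with
    # found_repo = "https://hg.mozilla.org/integration/autoland", the second
    # form yields
    # "https://hg.mozilla.org/integration/autoland/pushloghtml?fromchange=<good>&tochange=<bad>".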

    def get_range(self):
        return self._reverse_if_find_fix(self.good_revision, self.bad_revision)

    def print_range(self, good_date=None, bad_date=None, full=True):
        """
        Log the current state of the bisection process, with an
        appropriate pushlog URL.
        """
        if full:
            if good_date and bad_date:
                good_date = " (%s)" % good_date
                bad_date = " (%s)" % bad_date
            words = self._reverse_if_find_fix("Last", "First")
            LOG.info(
                "%s good revision: %s%s"
                % (words[0], self.good_revision, good_date if good_date else "")
            )
            LOG.info(
                "%s bad revision: %s%s"
                % (words[1], self.bad_revision, bad_date if bad_date else "")
            )
        LOG.info("Pushlog:\n%s\n" % self.get_pushlog_url())

    def build_good(self, mid, new_data):
        """
        Called by the Bisector when a build is good.

        *new_data* is ensured to contain at least two elements.
        """
        self._print_progress(new_data)

    def build_bad(self, mid, new_data):
        """
        Called by the Bisector when a build is bad.

        *new_data* is ensured to contain at least two elements.
        """
        self._print_progress(new_data)

    def build_retry(self, mid):
        pass

    def build_skip(self, mid):
        pass

    def no_data(self):
        pass

    def finished(self):
        pass

    def user_exit(self, mid):
        pass


class NightlyHandler(BisectorHandler):
    create_range = staticmethod(get_nightly_range)
    good_date = None
    bad_date = None

    def initialize(self):
        BisectorHandler.initialize(self)
        # register dates
        self.good_date, self.bad_date = self._reverse_if_find_fix(
            self.build_range[0].build_date, self.build_range[-1].build_date
        )

    def _print_progress(self, new_data):
        good_date, bad_date = self._reverse_if_find_fix(self.good_date, self.bad_date)
        next_good_date = new_data[0].build_date
        next_bad_date = new_data[-1].build_date
        next_days_range = abs((to_datetime(next_bad_date) - to_datetime(next_good_date)).days)
        LOG.info(
            "Narrowed nightly %s window from"
            " [%s, %s] (%d days) to [%s, %s] (%d days)"
            " (~%d steps left)"
            % (
                "fix" if self.find_fix else "regression",
                good_date,
                bad_date,
                abs((to_datetime(self.bad_date) - to_datetime(self.good_date)).days),
                next_good_date,
                next_bad_date,
                next_days_range,
                compute_steps_left(next_days_range),
            )
        )

    def _print_date_range(self):
        words = self._reverse_if_find_fix("Newest", "Oldest")
        LOG.info("%s known good nightly: %s" % (words[0], self.good_date))
        LOG.info("%s known bad nightly: %s" % (words[1], self.bad_date))

    def user_exit(self, mid):
        self._print_date_range()

    def are_revisions_available(self):
        return self.good_revision is not None and self.bad_revision is not None

    def get_date_range(self):
        return self._reverse_if_find_fix(self.good_date, self.bad_date)

    def print_range(self, full=True):
        if self.found_repo is None:
            # this may happen if we are bisecting old builds without
            # enough tests of the builds.
            LOG.error(
                "Sorry, but mozregression was unable to get"
                " a repository - no pushlog url available."
            )
            # still, we can print the date range
            if full:
                self._print_date_range()
        elif self.are_revisions_available():
            BisectorHandler.print_range(self, self.good_date, self.bad_date, full=full)
        else:
            if full:
                self._print_date_range()
            LOG.info("Pushlog:\n%s\n" % self.get_pushlog_url())

    def get_pushlog_url(self):
        assert self.found_repo
        if self.are_revisions_available():
            return BisectorHandler.get_pushlog_url(self)
        else:
            start, end = self.get_date_range()
            return "%s/pushloghtml?startdate=%s&enddate=%s\n" % (
                self.found_repo,
                start,
                end,
            )
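
    # Illustrative example (dates are placeholders): when only dates are known,
    # this yields a URL of the form
    # "<found_repo>/pushloghtml?startdate=2024-10-01&enddate=2024-10-15".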


class IntegrationHandler(BisectorHandler):
    create_range = staticmethod(get_integration_range)

    def _print_progress(self, new_data):
        LOG.info(
            "Narrowed integration %s window from [%s, %s]"
            " (%d builds) to [%s, %s] (%d builds)"
            " (~%d steps left)"
            % (
                "fix" if self.find_fix else "regression",
                self.build_range[0].short_changeset,
                self.build_range[-1].short_changeset,
                len(self.build_range),
                new_data[0].short_changeset,
                new_data[-1].short_changeset,
                len(new_data),
                compute_steps_left(len(new_data)),
            )
        )

    def user_exit(self, mid):
        words = self._reverse_if_find_fix("Newest", "Oldest")
        LOG.info("%s known good integration revision: %s" % (words[0], self.good_revision))
        LOG.info("%s known bad integration revision: %s" % (words[1], self.bad_revision))

    def _choose_integration_branch(self, changeset):
        """
        Tries to determine which integration branch the given changeset
        originated from by checking the date the changeset first showed up
        in each repo. The repo with the earliest date is chosen.
        """
        landings = {}
        for k in ("autoland", "mozilla-inbound"):
            jp = JsonPushes(k)

            try:
                push = jp.push(changeset, full="1")
                landings[k] = push.timestamp
            except EmptyPushlogError:
                LOG.debug("Didn't find %s in %s" % (changeset, k))

        repo = min(landings, key=landings.get)
        LOG.debug("Repo '%s' seems to have the earliest push" % repo)
        return repo

    def handle_merge(self):
        # let's check if we are facing a merge, and in that case,
        # continue the bisection from the merged branch.
        result = None

        LOG.debug("Starting merge handling...")
        # we have to check the commit of the most recent push
        most_recent_push = self.build_range[1]
        jp = JsonPushes(most_recent_push.repo_name)
        push = jp.push(most_recent_push.changeset, full="1")
        msg = push.changeset["desc"]
        LOG.debug("Found commit message:\n%s\n" % msg)
        branch = find_branch_in_merge_commit(msg, most_recent_push.repo_name)
        if not (branch and len(push.changesets) >= 2):
            # We did not find a branch; let's check the integration branches if we are bisecting m-c
            LOG.debug("Did not find a branch, checking all integration branches")
            if (
                get_name(most_recent_push.repo_name) == "mozilla-central"
                and len(push.changesets) >= 2
            ):
                branch = self._choose_integration_branch(most_recent_push.changeset)
                oldest = push.changesets[0]["node"]
                youngest = push.changesets[-1]["node"]
                LOG.info(
                    "************* Switching to %s by"
                    " process of elimination (no branch detected in"
                    " commit message)" % branch
                )
            else:
                return
        else:
            # so, this is a merge. See how many changesets are in it: if there
            # is just one (besides the merge), we have our answer
            if len(push.changesets) == 2:
                LOG.info(
                    "Merge commit has only two revisions (one of which "
                    "is the merge): we are done"
                )
                return

            # Otherwise, we can find the oldest and youngest
            # changesets, and the branch where the merge comes from.
            oldest = push.changesets[0]["node"]
            # exclude the merge commit
            youngest = push.changesets[-2]["node"]
            LOG.info("************* Switching to %s" % branch)

        # we can't use the oldest changeset directly because we
        # don't know yet if it is good.
        #
        # PUSH1    PUSH2
        # [1 2] [3 4 5 6 7]
        #    G    MERGE  B
        #
        # so first grab the previous push to get the last known good
        # changeset. This needs to be done on the right branch.
        try:
            jp2 = JsonPushes(branch)
            raw = [int(p.push_id) for p in jp2.pushes_within_changes(oldest, youngest)]
            data = jp2.pushes(
                startID=str(min(raw) - 2),
                endID=str(max(raw)),
            )

            older = data[0].changeset
            youngest = data[-1].changeset

            # we are ready to bisect further
            gr, br = self._reverse_if_find_fix(older, youngest)
            result = (branch, gr, br)
        except MozRegressionError:
            LOG.debug("Got exception", exc_info=True)
            raise MozRegressionError(
                "Unable to exploit the merge commit. Origin branch is {}, and"
                " the commit message for {} was:\n{}".format(
                    most_recent_push.repo_name, most_recent_push.short_changeset, msg
                )
            )
        LOG.debug("End merge handling")
        return result


"""
SnapHandler exists mostly to make it clear that there is no merge to take care
of: we run an integration-style bisection because Snap builds are triggered
from cron jobs on mozilla-central for all Snap package branches.
"""


class SnapHandler(IntegrationHandler):
    snap_repo = None
    _build_infos = {}
    snap_rev = {}
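
    # Note: these are class-level attributes, so _build_infos and snap_rev are
    # dictionaries shared by all SnapHandler instances. snap_rev maps a
    # mozilla-central changeset (recorded in record_build_infos) to the
    # corresponding Snap build's own changeset (set in update_build_infos).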

    def __init__(self, **kwargs):
        super(IntegrationHandler, self).__init__(**kwargs)

    def record_build_infos(self, build_infos):
        self._build_infos["_changeset"] = build_infos._changeset
        self._build_infos["_repo_url"] = build_infos._repo_url
        self.snap_repo = build_infos._repo_url

    def update_build_infos(self, build_infos):
        # _build_infos here holds the mozilla-central ones,
        # build_infos should be the snap-specific one
        self.snap_rev[self._build_infos["_changeset"]] = build_infos.changeset
        self.snap_repo = build_infos._repo_url

    def get_pushlog_url(self):
        # somehow, self.found_repo from this class would not reflect the
        # snap-specific repository, so build the URL from self.snap_repo instead
        first_rev, last_rev = self.get_range()
        if first_rev == last_rev:
            return "%s/pushloghtml?changeset=%s" % (self.snap_repo, first_rev)
        return "%s/pushloghtml?fromchange=%s&tochange=%s" % (
            self.snap_repo,
            first_rev,
            last_rev,
        )

    def revert_build_infos(self, build_infos):
        build_infos._changeset = self._build_infos["_changeset"]
        build_infos._repo_url = self._build_infos["_repo_url"]

    def handle_merge(self):
        return None


class IndexPromise(object):
    """
    A promise to get a build index.

    Provide a callable object that gives the next index when called.
    """

    def __init__(self, index, callback=None, args=()):
        self.thread = None
        self.index = index
        if callback:
            self.thread = threading.Thread(target=self._run, args=(callback,) + args)
            self.thread.start()

    def _run(self, callback, *args):
        self.index = callback(self.index, *args)

    def __call__(self):
        if self.thread:
            self.thread.join()
        return self.index
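
# Minimal usage sketch (illustrative callback, not part of this module): the
# callback runs on a worker thread as callback(index, *args), and calling the
# promise joins that thread and returns the possibly updated index.
#
#     promise = IndexPromise(5, callback=lambda i: i + 1)
#     ...  # do other work while the callback runs
#     assert promise() == 6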


class Bisection(object):
    RUNNING = 0
    NO_DATA = 1
    FINISHED = 2
    USER_EXIT = 3

    def __init__(
        self,
        handler,
        build_range,
        download_manager,
        test_runner,
        dl_in_background=True,
        approx_chooser=None,
    ):
        self.handler = handler
        self.build_range = build_range
        self.download_manager = download_manager
        self.test_runner = test_runner
        self.dl_in_background = dl_in_background
        self.history = BisectionHistory()
        self.approx_chooser = approx_chooser

    def search_mid_point(self, interrupt=None):
        self.handler.set_build_range(self.build_range)
        return self._search_mid_point(interrupt=interrupt)

    def _search_mid_point(self, interrupt=None):
        return self.build_range.mid_point(interrupt=interrupt)

    def init_handler(self, mid_point):
        if len(self.build_range) == 0:
            self.handler.no_data()
            return self.NO_DATA

        self.handler.initialize()

        if mid_point == 0:
            self.handler.finished()
            return self.FINISHED
        return self.RUNNING

    def download_build(self, mid_point, allow_bg_download=True):
        """
        Download the build for the given mid_point.

        This call may start the download of next builds in background (if
        dl_in_background evaluates to True). Note that the mid point may
        change in this case.

        Returns a tuple (index_promise, build_infos) where build_infos
        is the dict of build infos for the build.
        """
        build_infos = self.handler.build_range[mid_point]
        return self._download_build(mid_point, build_infos, allow_bg_download=allow_bg_download)

    def _find_approx_build(self, mid_point, build_infos):
        approx_index, persist_files = None, ()
        if self.approx_chooser:
            # try to find an approx build
            persist_files = os.listdir(self.download_manager.destdir)
            # first test if we have the exact file - if we do,
            # just act as usual, the downloader will take care of it.
            if build_infos.persist_filename not in persist_files:
                approx_index = self.approx_chooser.index(
                    self.build_range, build_infos, persist_files
                )
        if approx_index is not None:
            # we found an approx build. First, stop possible background
            # downloads, then update the mid point and build info.
            if self.download_manager.background_dl_policy == "cancel":
                self.download_manager.cancel()

            old_url = build_infos.build_url
            mid_point = approx_index
            build_infos = self.build_range[approx_index]
            fname = self.download_manager.get_dest(build_infos.persist_filename)
            LOG.info(
                "Using `%s` as an acceptable approximated"
                " build file instead of downloading %s" % (fname, old_url)
            )
            build_infos.build_file = fname
        return (approx_index is not None, mid_point, build_infos, persist_files)

    def _download_build(self, mid_point, build_infos, allow_bg_download=True):
        found, mid_point, build_infos, persist_files = self._find_approx_build(
            mid_point, build_infos
        )
        if not found and self.download_manager:
            # else, do the download. Note that nothing will
            # be downloaded if the exact build file is already present.
            self.download_manager.focus_download(build_infos)
        callback = None
        if self.dl_in_background and allow_bg_download:
            callback = self._download_next_builds
        return (IndexPromise(mid_point, callback, args=(persist_files,)), build_infos)

    def _download_next_builds(self, mid_point, persist_files=()):
        # start downloading the next builds.
        # note that we don't have to worry if builds are already
        # downloaded, or if our build infos are the same, because
        # this will be handled by the download manager.
        def start_dl(r):
            # first get the next mid point
            # this will trigger some blocking downloads
            # (we need to find the build info)
            m = r.mid_point()
            if len(r) != 0:
                # non-blocking download of the build
                if (
                    self.approx_chooser
                    and self.approx_chooser.index(r, r[m], persist_files) is not None
                ):
                    pass  # nothing to download, we have an approx build
                else:
                    self.download_manager.download_in_background(r[m])

        bdata = self.build_range[mid_point]
        # download next left mid point
        start_dl(self.build_range[mid_point:])
        # download right next mid point
        start_dl(self.build_range[: mid_point + 1])
        # since we called mid_point() on a copy of the self.build_range
        # instance, the underlying cache may have changed and we need to find
        # the new mid point.
        self.build_range.filter_invalid_builds()
        return self.build_range.index(bdata)

    def evaluate(self, build_infos):
        # we force getting data from app info for Snap since we are building
        # everything out of mozilla-central
        if type(self.handler) is SnapHandler:
            self.handler.record_build_infos(build_infos)
            build_infos._force_update = True
        verdict = self.test_runner.evaluate(build_infos, allow_back=bool(self.history))
        # old builds do not have metadata about the repo. But once
        # the build is installed, we may have it
        if self.handler.found_repo is None:
            self.handler.found_repo = build_infos.repo_url
        if type(self.handler) is SnapHandler:
            # Some Snap nightly builds are missing SourceRepository/SourceStamp,
            # so since we don't have a better source of information, let's get
            # back what we had
            if build_infos.repo_url is None:
                LOG.warning(
                    "Bisection on a Snap package missing SourceRepository/SourceStamp,"
                    " falling back to mozilla-central revs."
                )
                build_infos._force_update = False
                self.handler.revert_build_infos(build_infos)
            else:
                self.handler.update_build_infos(build_infos)
        return verdict

    def ensure_good_and_bad(self):
        good, bad = self.build_range[0], self.build_range[-1]
        if self.handler.find_fix:
            good, bad = bad, good

        LOG.info("Testing good and bad builds to ensure that they are" " really good and bad...")
        self.download_manager.focus_download(good)
        if self.dl_in_background:
            self.download_manager.download_in_background(bad)

        def _evaluate(build_info, expected):
            while 1:
                res = self.test_runner.evaluate(build_info)
                if res == expected[0]:
                    return True
                elif res == "s":
                    LOG.info("You can not skip this build.")
                elif res == "e":
                    return
                elif res == "r":
                    pass
                else:
                    raise GoodBadExpectationError(
                        "Build was expected to be %s! The initial good/bad"
                        " range seems incorrect." % expected
                    )

        if _evaluate(good, "good"):
            self.download_manager.focus_download(bad)
            if self.dl_in_background:
                # download next build (mid) in background
                self.download_manager.download_in_background(
                    self.build_range[self.build_range.mid_point()]
                )
            return _evaluate(bad, "bad")

    def handle_verdict(self, mid_point, verdict):
        if verdict == "g":
            # if build is good and we are looking for a regression, we
            # have to split from
            # [G, ?, ?, G, ?, B]
            # to
            #          [G, ?, B]
            self.history.add(self.build_range, mid_point, verdict)
            if not self.handler.find_fix:
                self.build_range = self.build_range[mid_point:]
            else:
                self.build_range = self.build_range[: mid_point + 1]
            self.handler.build_good(mid_point, self.build_range)
        elif verdict == "b":
            # if build is bad and we are looking for a regression, we
            # have to split from
            # [G, ?, ?, B, ?, B]
            # to
            # [G, ?, ?, B]
            self.history.add(self.build_range, mid_point, verdict)
            if not self.handler.find_fix:
                self.build_range = self.build_range[: mid_point + 1]
            else:
                self.build_range = self.build_range[mid_point:]
            self.handler.build_bad(mid_point, self.build_range)
        elif verdict == "r":
            self.handler.build_retry(mid_point)
        elif verdict == "s":
            self.handler.build_skip(mid_point)
            self.history.add(self.build_range, mid_point, verdict)
            self.build_range = self.build_range.deleted(mid_point)
        elif verdict == "back":
            self.build_range = self.history[-1].build_range
        else:
            # user exit
            self.handler.user_exit(mid_point)
            return self.USER_EXIT
        return self.RUNNING
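
    # Verdict codes handled above: "g" (good), "b" (bad), "r" (retry),
    # "s" (skip), "back" (return to the previous build range); anything else
    # is treated as a user exit.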


class Bisector(object):
    """
    Handle the logic of the bisection process, and report events to a given
    :class:`BisectorHandler`.
    """

    def __init__(
        self,
        fetch_config,
        test_runner,
        download_manager,
        dl_in_background=True,
        approx_chooser=None,
    ):
        self.fetch_config = fetch_config
        self.test_runner = test_runner
        self.download_manager = download_manager
        self.dl_in_background = dl_in_background
        self.approx_chooser = approx_chooser

    def bisect(self, handler, good, bad, **kwargs):
        if handler.find_fix:
            good, bad = bad, good
        build_range = handler.create_range(self.fetch_config, good, bad, **kwargs)

        return self._bisect(handler, build_range)

    def _bisect(self, handler, build_range):
        """
        Starts a bisection for a :class:`mozregression.build_range.BuildData`.
        """

        bisection = Bisection(
            handler,
            build_range,
            self.download_manager,
            self.test_runner,
            dl_in_background=self.dl_in_background,
            approx_chooser=self.approx_chooser,
        )

        previous_verdict = None

        while True:
            index = bisection.search_mid_point()
            result = bisection.init_handler(index)
            if result != bisection.RUNNING:
                return result
            if previous_verdict is None and handler.ensure_good_and_bad:
                if bisection.ensure_good_and_bad():
                    LOG.info("Good and bad builds are correct. Let's" " continue the bisection.")
                else:
                    return bisection.USER_EXIT
            bisection.handler.print_range(full=False)

            if previous_verdict == "back":
                index = bisection.history.pop(-1).index

            allow_bg_download = True
            if previous_verdict == "s":
                # disallow background download since we are not sure of what
                # to download next.
                allow_bg_download = False
                index = self.test_runner.index_to_try_after_skip(bisection.build_range)

            index_promise = None
            build_info = bisection.build_range[index]
            try:
                if previous_verdict != "r" and build_info:
                    # if the last verdict was retry, do not download
                    # the build. Furthermore, trying to download if we are
                    # in background download mode would stop the next builds
                    # from downloading.
                    index_promise, build_info = bisection.download_build(
                        index, allow_bg_download=allow_bg_download
                    )

                if not build_info:
                    LOG.info("Unable to find build info. Skipping this build...")
                    verdict = "s"
                else:
                    try:
                        verdict = bisection.evaluate(build_info)
                    except LauncherError as exc:
                        # we got an unrecoverable error while trying
                        # to run the tested app. We can just fall back
                        # to skipping the build.
                        LOG.info("Error: %s. Skipping this build..." % exc)
                        verdict = "s"
            finally:
                # be sure to terminate the index_promise thread in all
                # circumstances.
                if index_promise:
                    index = index_promise()
            previous_verdict = verdict
            result = bisection.handle_verdict(index, verdict)
            if result != bisection.RUNNING:
                return result
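
# Minimal sketch of how these pieces fit together (fetch_config, test_runner,
# download_manager, good_date and bad_date are placeholders supplied by the
# caller, not defined in this module):
#
#     handler = NightlyHandler(ensure_good_and_bad=True)
#     bisector = Bisector(fetch_config, test_runner, download_manager)
#     result = bisector.bisect(handler, good_date, bad_date)
#     if result == Bisection.FINISHED:
#         handler.print_range()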