/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
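The same branch can also be opened programmatically; a minimal sketch using breezy's Python API (Branch.open is part of the file shown in the diff below):

    from breezy.branch import Branch
    b = Branch.open('http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar')
    print(b.last_revision())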


Viewing changes to breezy/branch.py

  • Committer: Jelmer Vernooij
  • Date: 2017-06-08 23:30:31 UTC
  • mto: This revision was merged to the branch mainline in revision 6690.
  • Revision ID: jelmer@jelmer.uk-20170608233031-3qavls2o7a1pqllj
  • Commit message: Update imports.

  14   14  # along with this program; if not, write to the Free Software
  15   15  # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  16   16
        17  from __future__ import absolute_import
        18
        19  from . import errors
        20
  17   21  from .lazy_import import lazy_import
  18   22  lazy_import(globals(), """
  19        import contextlib
  20   23  import itertools
  21   24  from breezy import (
        25      cleanup,
  22   26      config as _mod_config,
  23   27      debug,
  24            memorytree,
        28      fetch,
        29      remote,
  25   30      repository,
  26   31      revision as _mod_revision,
  27   32      tag as _mod_tag,
  28   33      transport,
  29   34      ui,
  30   35      urlutils,
  31            )
  32        from breezy.bzr import (
  33            fetch,
  34            remote,
  35   36      vf_search,
  36   37      )
  37   38  from breezy.i18n import gettext, ngettext
  39   40
  40   41  from . import (
  41   42      controldir,
  42            errors,
  43   43      registry,
  44   44      )
        45  from .decorators import (
        46      needs_read_lock,
        47      needs_write_lock,
        48      only_raises,
        49      )
  45   50  from .hooks import Hooks
  46   51  from .inter import InterObject
  47   52  from .lock import LogicalLockResult
  48        from .trace import mutter, mutter_callsite, note, is_quiet, warning
  49
  50
  51        class UnstackableBranchFormat(errors.BzrError):
  52
  53            _fmt = ("The branch '%(url)s'(%(format)s) is not a stackable format. "
  54                    "You will need to upgrade the branch to permit branch stacking.")
  55
  56            def __init__(self, format, url):
  57                errors.BzrError.__init__(self)
  58                self.format = format
  59                self.url = url
        53  from .sixish import (
        54      BytesIO,
        55      viewitems,
        56      )
        57  from .trace import mutter, mutter_callsite, note, is_quiet
  60   58
  61   59
  62   60  class Branch(controldir.ControlComponent):
  79   77
  80   78      @property
  81   79      def user_transport(self):
  82                return self.controldir.user_transport
        80          return self.bzrdir.user_transport
  83   81
  84   82      def __init__(self, possible_transports=None):
  85   83          self.tags = self._format.make_tags(self)
  87   85          self._revision_id_to_revno_cache = None
  88   86          self._partial_revision_id_to_revno_cache = {}
  89   87          self._partial_revision_history_cache = []
        88          self._tags_bytes = None
  90   89          self._last_revision_info_cache = None
  91   90          self._master_branch_cache = None
  92   91          self._merge_sorted_revisions_cache = None
 149  148          repository._iter_for_revno(
 150  149              self.repository, self._partial_revision_history_cache,
 151  150              stop_index=stop_index, stop_revision=stop_revision)
 152                if self._partial_revision_history_cache[-1] == \
 153                        _mod_revision.NULL_REVISION:
       151          if self._partial_revision_history_cache[-1] == _mod_revision.NULL_REVISION:
 154  152              self._partial_revision_history_cache.pop()
 155  153
 156  154      def _get_check_refs(self):
 168  166          For instance, if the branch is at URL/.bzr/branch,
 169  167          Branch.open(URL) -> a Branch instance.
 170  168          """
 171                control = controldir.ControlDir.open(
 172                    base, possible_transports=possible_transports,
 173                    _unsupported=_unsupported)
 174                return control.open_branch(
 175                    unsupported=_unsupported,
       169          control = controldir.ControlDir.open(base,
       170              possible_transports=possible_transports, _unsupported=_unsupported)
       171          return control.open_branch(unsupported=_unsupported,
 176  172              possible_transports=possible_transports)
 177  173
 178  174      @staticmethod
 179  175      def open_from_transport(transport, name=None, _unsupported=False,
 180                                    possible_transports=None):
       176              possible_transports=None):
 181  177          """Open the branch rooted at transport"""
 182                control = controldir.ControlDir.open_from_transport(
 183                    transport, _unsupported)
 184                return control.open_branch(
 185                    name=name, unsupported=_unsupported,
       178          control = controldir.ControlDir.open_from_transport(transport, _unsupported)
       179          return control.open_branch(name=name, unsupported=_unsupported,
 186  180              possible_transports=possible_transports)
 187  181
 188  182      @staticmethod
 193  187
 194  188          Basically we keep looking up until we find the control directory or
 195  189          run into the root.  If there isn't one, raises NotBranchError.
 196                If there is one and it is either an unrecognised format or an
 197                unsupported format, UnknownFormatError or UnsupportedFormatError are
 198                raised.  If there is one, it is returned, along with the unused portion
 199                of url.
       190          If there is one and it is either an unrecognised format or an unsupported
       191          format, UnknownFormatError or UnsupportedFormatError are raised.
       192          If there is one, it is returned, along with the unused portion of url.
 200  193          """
 201                control, relpath = controldir.ControlDir.open_containing(
 202                    url, possible_transports)
       194          control, relpath = controldir.ControlDir.open_containing(url,
       195                                                           possible_transports)
 203  196          branch = control.open_branch(possible_transports=possible_transports)
 204  197          return (branch, relpath)
 205  198
 231  224          """
 232  225          return _mod_config.BranchStack(self)
 233  226
       227      def _get_config(self):
       228          """Get the concrete config for just the config in this branch.
       229
       230          This is not intended for client use; see Branch.get_config for the
       231          public API.
       232
       233          Added in 1.14.
       234
       235          :return: An object supporting get_option and set_option.
       236          """
       237          raise NotImplementedError(self._get_config)
       238
 234  239      def store_uncommitted(self, creator):
 235  240          """Store uncommitted changes from a ShelfCreator.
 236  241
 254  259          a_branch = Branch.open(url, possible_transports=possible_transports)
 255  260          return a_branch.repository
 256  261
       262      @needs_read_lock
       263      def _get_tags_bytes(self):
       264          """Get the bytes of a serialised tags dict.
       265
       266          Note that not all branches support tags, nor do all use the same tags
       267          logic: this method is specific to BasicTags. Other tag implementations
       268          may use the same method name and behave differently, safely, because
       269          of the double-dispatch via
       270          format.make_tags->tags_instance->get_tags_dict.
       271
       272          :return: The bytes of the tags file.
       273          :seealso: Branch._set_tags_bytes.
       274          """
       275          if self._tags_bytes is None:
       276              self._tags_bytes = self._transport.get_bytes('tags')
       277          return self._tags_bytes
       278
 257  279      def _get_nick(self, local=False, possible_transports=None):
 258  280          config = self.get_config()
 259  281          # explicit overrides master, but don't look for master if local is True
 271  293                  # Silently fall back to local implicit nick if the master is
 272  294                  # unavailable
 273  295                  mutter("Could not connect to bound branch, "
 274                               "falling back to local nick.\n " + str(e))
       296                      "falling back to local nick.\n " + str(e))
 275  297          return config.get_nickname()
 276  298
 277  299      def _set_nick(self, nick):
 300  322          new_history = []
 301  323          check_not_reserved_id = _mod_revision.check_not_reserved_id
 302  324          # Do not include ghosts or graph origin in revision_history
 303                while (current_rev_id in parents_map
 304                       and len(parents_map[current_rev_id]) > 0):
       325          while (current_rev_id in parents_map and
       326                 len(parents_map[current_rev_id]) > 0):
 305  327              check_not_reserved_id(current_rev_id)
 306  328              new_history.append(current_rev_id)
 307  329              current_rev_id = parents_map[current_rev_id][0]
 335  357      def get_physical_lock_status(self):
 336  358          raise NotImplementedError(self.get_physical_lock_status)
 337  359
       360      @needs_read_lock
 338  361      def dotted_revno_to_revision_id(self, revno, _cache_reverse=False):
 339  362          """Return the revision_id for a dotted revno.
 340  363
 346  369          :return: the revision_id
 347  370          :raises errors.NoSuchRevision: if the revno doesn't exist
 348  371          """
 349                with self.lock_read():
 350                    rev_id = self._do_dotted_revno_to_revision_id(revno)
 351                    if _cache_reverse:
 352                        self._partial_revision_id_to_revno_cache[rev_id] = revno
 353                    return rev_id
       372          rev_id = self._do_dotted_revno_to_revision_id(revno)
       373          if _cache_reverse:
       374              self._partial_revision_id_to_revno_cache[rev_id] = revno
       375          return rev_id
 354  376
 355  377      def _do_dotted_revno_to_revision_id(self, revno):
 356  378          """Worker function for dotted_revno_to_revision_id.
 359  381          provide a more efficient implementation.
 360  382          """
 361  383          if len(revno) == 1:
 362                    try:
 363                        return self.get_rev_id(revno[0])
 364                    except errors.RevisionNotPresent as e:
 365                        raise errors.GhostRevisionsHaveNoRevno(revno[0], e.revision_id)
       384              return self.get_rev_id(revno[0])
 366  385          revision_id_to_revno = self.get_revision_id_to_revno_map()
 367  386          revision_ids = [revision_id for revision_id, this_revno
 368                                in revision_id_to_revno.items()
       387                          in viewitems(revision_id_to_revno)
 369  388                          if revno == this_revno]
 370  389          if len(revision_ids) == 1:
 371  390              return revision_ids[0]
 373  392              revno_str = '.'.join(map(str, revno))
 374  393              raise errors.NoSuchRevision(self, revno_str)
 375  394
       395      @needs_read_lock
 376  396      def revision_id_to_dotted_revno(self, revision_id):
 377  397          """Given a revision id, return its dotted revno.
 378  398
 379  399          :return: a tuple like (1,) or (400,1,3).
 380  400          """
 381                with self.lock_read():
 382                    return self._do_revision_id_to_dotted_revno(revision_id)
       401          return self._do_revision_id_to_dotted_revno(revision_id)
 383  402
 384  403      def _do_revision_id_to_dotted_revno(self, revision_id):
 385  404          """Worker function for revision_id_to_revno."""
 402  421                  raise errors.NoSuchRevision(self, revision_id)
 403  422          return result
 404  423
 
424
    @needs_read_lock
405
425
    def get_revision_id_to_revno_map(self):
406
426
        """Return the revision_id => dotted revno map.
407
427
 
410
430
        :return: A dictionary mapping revision_id => dotted revno.
411
431
            This dictionary should not be modified by the caller.
412
432
        """
413
 
        if 'evil' in debug.debug_flags:
414
 
            mutter_callsite(
415
 
                3, "get_revision_id_to_revno_map scales with ancestry.")
416
 
        with self.lock_read():
417
 
            if self._revision_id_to_revno_cache is not None:
418
 
                mapping = self._revision_id_to_revno_cache
419
 
            else:
420
 
                mapping = self._gen_revno_map()
421
 
                self._cache_revision_id_to_revno(mapping)
422
 
            # TODO: jam 20070417 Since this is being cached, should we be
423
 
            # returning a copy?
424
 
            # I would rather not, and instead just declare that users should
425
 
            # not modify the return value.
426
 
            return mapping
 
433
        if self._revision_id_to_revno_cache is not None:
 
434
            mapping = self._revision_id_to_revno_cache
 
435
        else:
 
436
            mapping = self._gen_revno_map()
 
437
            self._cache_revision_id_to_revno(mapping)
 
438
        # TODO: jam 20070417 Since this is being cached, should we be returning
 
439
        #       a copy?
 
440
        # I would rather not, and instead just declare that users should not
 
441
        # modify the return value.
 
442
        return mapping
427
443
 
428
444
    def _gen_revno_map(self):
429
445
        """Create a new mapping from revision ids to dotted revnos.
435
451
 
436
452
        :return: A dictionary mapping revision_id => dotted revno.
437
453
        """
438
 
        revision_id_to_revno = {
439
 
            rev_id: revno for rev_id, depth, revno, end_of_merge
440
 
            in self.iter_merge_sorted_revisions()}
 
454
        revision_id_to_revno = dict((rev_id, revno)
 
455
            for rev_id, depth, revno, end_of_merge
 
456
             in self.iter_merge_sorted_revisions())
441
457
        return revision_id_to_revno
442
458
 
 
459
    @needs_read_lock
443
460
    def iter_merge_sorted_revisions(self, start_revision_id=None,
444
 
                                    stop_revision_id=None,
445
 
                                    stop_rule='exclude', direction='reverse'):
 
461
            stop_revision_id=None, stop_rule='exclude', direction='reverse'):
446
462
        """Walk the revisions for a branch in merge sorted order.
447
463
 
448
464
        Merge sorted order is the output from a merge-aware,
460
476
            * 'include' - the stop revision is the last item in the result
461
477
            * 'with-merges' - include the stop revision and all of its
462
478
              merged revisions in the result
463
 
            * 'with-merges-without-common-ancestry' - filter out revisions
 
479
            * 'with-merges-without-common-ancestry' - filter out revisions 
464
480
              that are in both ancestries
465
481
        :param direction: either 'reverse' or 'forward':
466
482
 
485
501
            * end_of_merge: When True the next node (earlier in history) is
486
502
              part of a different merge.
487
503
        """
488
 
        with self.lock_read():
489
 
            # Note: depth and revno values are in the context of the branch so
490
 
            # we need the full graph to get stable numbers, regardless of the
491
 
            # start_revision_id.
492
 
            if self._merge_sorted_revisions_cache is None:
493
 
                last_revision = self.last_revision()
494
 
                known_graph = self.repository.get_known_graph_ancestry(
495
 
                    [last_revision])
496
 
                self._merge_sorted_revisions_cache = known_graph.merge_sort(
497
 
                    last_revision)
498
 
            filtered = self._filter_merge_sorted_revisions(
499
 
                self._merge_sorted_revisions_cache, start_revision_id,
500
 
                stop_revision_id, stop_rule)
501
 
            # Make sure we don't return revisions that are not part of the
502
 
            # start_revision_id ancestry.
503
 
            filtered = self._filter_start_non_ancestors(filtered)
504
 
            if direction == 'reverse':
505
 
                return filtered
506
 
            if direction == 'forward':
507
 
                return reversed(list(filtered))
508
 
            else:
509
 
                raise ValueError('invalid direction %r' % direction)
 
504
        # Note: depth and revno values are in the context of the branch so
 
505
        # we need the full graph to get stable numbers, regardless of the
 
506
        # start_revision_id.
 
507
        if self._merge_sorted_revisions_cache is None:
 
508
            last_revision = self.last_revision()
 
509
            known_graph = self.repository.get_known_graph_ancestry(
 
510
                [last_revision])
 
511
            self._merge_sorted_revisions_cache = known_graph.merge_sort(
 
512
                last_revision)
 
513
        filtered = self._filter_merge_sorted_revisions(
 
514
            self._merge_sorted_revisions_cache, start_revision_id,
 
515
            stop_revision_id, stop_rule)
 
516
        # Make sure we don't return revisions that are not part of the
 
517
        # start_revision_id ancestry.
 
518
        filtered = self._filter_start_non_ancestors(filtered)
 
519
        if direction == 'reverse':
 
520
            return filtered
 
521
        if direction == 'forward':
 
522
            return reversed(list(filtered))
 
523
        else:
 
524
            raise ValueError('invalid direction %r' % direction)
510
525
 
511
526
    def _filter_merge_sorted_revisions(self, merge_sorted_revisions,
512
 
                                       start_revision_id, stop_revision_id,
513
 
                                       stop_rule):
 
527
        start_revision_id, stop_revision_id, stop_rule):
514
528
        """Iterate over an inclusive range of sorted revisions."""
515
529
        rev_iter = iter(merge_sorted_revisions)
516
530
        if start_revision_id is not None:
571
585
                if rev_id == left_parent:
572
586
                    # reached the left parent after the stop_revision
573
587
                    return
574
 
                if (not reached_stop_revision_id
575
 
                        or rev_id in revision_id_whitelist):
 
588
                if (not reached_stop_revision_id or
 
589
                        rev_id in revision_id_whitelist):
576
590
                    yield (rev_id, node.merge_depth, node.revno,
577
 
                           node.end_of_merge)
 
591
                       node.end_of_merge)
578
592
                    if reached_stop_revision_id or rev_id == stop_revision_id:
579
593
                        # only do the merged revs of rev_id from now on
580
594
                        rev = self.repository.get_revision(rev_id)
590
604
        # ancestry. Given the order guaranteed by the merge sort, we will see
591
605
        # uninteresting descendants of the first parent of our tip before the
592
606
        # tip itself.
593
 
        try:
594
 
            first = next(rev_iter)
595
 
        except StopIteration:
596
 
            return
 
607
        first = next(rev_iter)
597
608
        (rev_id, merge_depth, revno, end_of_merge) = first
598
609
        yield first
599
610
        if not merge_depth:
636
647
        """Tell this branch object not to release the physical lock when this
637
648
        object is unlocked.
638
649
 
639
 
        If lock_write doesn't return a token, then this method is not
640
 
        supported.
 
650
        If lock_write doesn't return a token, then this method is not supported.
641
651
        """
642
652
        self.control_files.leave_in_place()
643
653
 
645
655
        """Tell this branch object to release the physical lock when this
646
656
        object is unlocked, even if it didn't originally acquire it.
647
657
 
648
 
        If lock_write doesn't return a token, then this method is not
649
 
        supported.
 
658
        If lock_write doesn't return a token, then this method is not supported.
650
659
        """
651
660
        self.control_files.dont_leave_in_place()
652
661
 
670
679
            raise errors.UpgradeRequired(self.user_url)
671
680
        self.get_config_stack().set('append_revisions_only', enabled)
672
681
 
673
 
    def fetch(self, from_branch, stop_revision=None, limit=None, lossy=False):
 
682
    def set_reference_info(self, file_id, tree_path, branch_location):
 
683
        """Set the branch location to use for a tree reference."""
 
684
        raise errors.UnsupportedOperation(self.set_reference_info, self)
 
685
 
 
686
    def get_reference_info(self, file_id):
 
687
        """Get the tree_path and branch_location for a tree reference."""
 
688
        raise errors.UnsupportedOperation(self.get_reference_info, self)
 
689
 
 
690
    @needs_write_lock
 
691
    def fetch(self, from_branch, last_revision=None, limit=None):
674
692
        """Copy revisions from from_branch into this branch.
675
693
 
676
694
        :param from_branch: Where to copy from.
677
 
        :param stop_revision: What revision to stop at (None for at the end
 
695
        :param last_revision: What revision to stop at (None for at the end
678
696
                              of the branch.
679
697
        :param limit: Optional rough limit of revisions to fetch
680
698
        :return: None
681
699
        """
682
 
        with self.lock_write():
683
 
            return InterBranch.get(from_branch, self).fetch(
684
 
                stop_revision, limit=limit, lossy=lossy)
 
700
        return InterBranch.get(from_branch, self).fetch(last_revision, limit=limit)
685
701
 
686
702
    def get_bound_location(self):
687
703
        """Return the URL of the branch we are bound to.
709
725
        :param revprops: Optional dictionary of revision properties.
710
726
        :param revision_id: Optional revision id.
711
727
        :param lossy: Whether to discard data that can not be natively
712
 
            represented, when pushing to a foreign VCS
 
728
            represented, when pushing to a foreign VCS 
713
729
        """
714
730
 
715
731
        if config_stack is None:
716
732
            config_stack = self.get_config_stack()
717
733
 
718
 
        return self.repository.get_commit_builder(
719
 
            self, parents, config_stack, timestamp, timezone, committer,
720
 
            revprops, revision_id, lossy)
 
734
        return self.repository.get_commit_builder(self, parents, config_stack,
 
735
            timestamp, timezone, committer, revprops, revision_id,
 
736
            lossy)
721
737
 
722
738
    def get_master_branch(self, possible_transports=None):
723
739
        """Return the branch we are bound to.
735
751
        """
736
752
        raise NotImplementedError(self.get_stacked_on_url)
737
753
 
 
754
    def print_file(self, file, revision_id):
 
755
        """Print `file` to stdout."""
 
756
        raise NotImplementedError(self.print_file)
 
757
 
 
758
    @needs_write_lock
738
759
    def set_last_revision_info(self, revno, revision_id):
739
760
        """Set the last revision of this branch.
740
761
 
748
769
        """
749
770
        raise NotImplementedError(self.set_last_revision_info)
750
771
 
 
772
    @needs_write_lock
751
773
    def generate_revision_history(self, revision_id, last_rev=None,
752
774
                                  other_branch=None):
753
775
        """See Branch.generate_revision_history"""
754
 
        with self.lock_write():
755
 
            graph = self.repository.get_graph()
756
 
            (last_revno, last_revid) = self.last_revision_info()
757
 
            known_revision_ids = [
758
 
                (last_revid, last_revno),
759
 
                (_mod_revision.NULL_REVISION, 0),
760
 
                ]
761
 
            if last_rev is not None:
762
 
                if not graph.is_ancestor(last_rev, revision_id):
763
 
                    # our previous tip is not merged into stop_revision
764
 
                    raise errors.DivergedBranches(self, other_branch)
765
 
            revno = graph.find_distance_to_null(
766
 
                revision_id, known_revision_ids)
767
 
            self.set_last_revision_info(revno, revision_id)
 
776
        graph = self.repository.get_graph()
 
777
        (last_revno, last_revid) = self.last_revision_info()
 
778
        known_revision_ids = [
 
779
            (last_revid, last_revno),
 
780
            (_mod_revision.NULL_REVISION, 0),
 
781
            ]
 
782
        if last_rev is not None:
 
783
            if not graph.is_ancestor(last_rev, revision_id):
 
784
                # our previous tip is not merged into stop_revision
 
785
                raise errors.DivergedBranches(self, other_branch)
 
786
        revno = graph.find_distance_to_null(revision_id, known_revision_ids)
 
787
        self.set_last_revision_info(revno, revision_id)
768
788
 
 
789
    @needs_write_lock
769
790
    def set_parent(self, url):
770
791
        """See Branch.set_parent."""
771
792
        # TODO: Maybe delete old location files?
773
794
        # FIXUP this and get_parent in a future branch format bump:
774
795
        # read and rewrite the file. RBC 20060125
775
796
        if url is not None:
776
 
            if isinstance(url, str):
 
797
            if isinstance(url, unicode):
777
798
                try:
778
 
                    url.encode('ascii')
 
799
                    url = url.encode('ascii')
779
800
                except UnicodeEncodeError:
780
 
                    raise urlutils.InvalidURL(
781
 
                        url, "Urls must be 7-bit ascii, "
 
801
                    raise errors.InvalidURL(url,
 
802
                        "Urls must be 7-bit ascii, "
782
803
                        "use breezy.urlutils.escape")
783
804
            url = urlutils.relative_url(self.base, url)
784
 
        with self.lock_write():
785
 
            self._set_parent_location(url)
 
805
        self._set_parent_location(url)
786
806
 
 
807
    @needs_write_lock
787
808
    def set_stacked_on_url(self, url):
788
809
        """Set the URL this branch is stacked against.
789
810
 
793
814
            stacking.
794
815
        """
795
816
        if not self._format.supports_stacking():
796
 
            raise UnstackableBranchFormat(self._format, self.user_url)
797
 
        with self.lock_write():
798
 
            # XXX: Changing from one fallback repository to another does not
799
 
            # check that all the data you need is present in the new fallback.
800
 
            # Possibly it should.
801
 
            self._check_stackable_repo()
802
 
            if not url:
803
 
                try:
804
 
                    self.get_stacked_on_url()
805
 
                except (errors.NotStacked, UnstackableBranchFormat,
806
 
                        errors.UnstackableRepositoryFormat):
807
 
                    return
808
 
                self._unstack()
809
 
            else:
810
 
                self._activate_fallback_location(
811
 
                    url, possible_transports=[self.controldir.root_transport])
812
 
            # write this out after the repository is stacked to avoid setting a
813
 
            # stacked config that doesn't work.
814
 
            self._set_config_location('stacked_on_location', url)
 
817
            raise errors.UnstackableBranchFormat(self._format, self.user_url)
 
818
        # XXX: Changing from one fallback repository to another does not check
 
819
        # that all the data you need is present in the new fallback.
 
820
        # Possibly it should.
 
821
        self._check_stackable_repo()
 
822
        if not url:
 
823
            try:
 
824
                old_url = self.get_stacked_on_url()
 
825
            except (errors.NotStacked, errors.UnstackableBranchFormat,
 
826
                errors.UnstackableRepositoryFormat):
 
827
                return
 
828
            self._unstack()
 
829
        else:
 
830
            self._activate_fallback_location(url,
 
831
                possible_transports=[self.bzrdir.root_transport])
 
832
        # write this out after the repository is stacked to avoid setting a
 
833
        # stacked config that doesn't work.
 
834
        self._set_config_location('stacked_on_location', url)
815
835
 
816
836
    def _unstack(self):
817
837
        """Change a branch to be unstacked, copying data as needed.
818
838
 
819
839
        Don't call this directly, use set_stacked_on_url(None).
820
840
        """
821
 
        with ui.ui_factory.nested_progress_bar() as pb:
 
841
        pb = ui.ui_factory.nested_progress_bar()
 
842
        try:
822
843
            pb.update(gettext("Unstacking"))
823
844
            # The basic approach here is to fetch the tip of the branch,
824
845
            # including all available ghosts, from the existing stacked
825
 
            # repository into a new repository object without the fallbacks.
 
846
            # repository into a new repository object without the fallbacks. 
826
847
            #
827
848
            # XXX: See <https://launchpad.net/bugs/397286> - this may not be
828
849
            # correct for CHKMap repostiories
829
850
            old_repository = self.repository
830
851
            if len(old_repository._fallback_repositories) != 1:
831
 
                raise AssertionError(
832
 
                    "can't cope with fallback repositories "
833
 
                    "of %r (fallbacks: %r)" % (
834
 
                        old_repository, old_repository._fallback_repositories))
 
852
                raise AssertionError("can't cope with fallback repositories "
 
853
                    "of %r (fallbacks: %r)" % (old_repository,
 
854
                        old_repository._fallback_repositories))
835
855
            # Open the new repository object.
836
856
            # Repositories don't offer an interface to remove fallback
837
857
            # repositories today; take the conceptually simpler option and just
841
861
            # separate SSH connection setup, but unstacking is not a
842
862
            # common operation so it's tolerable.
843
863
            new_bzrdir = controldir.ControlDir.open(
844
 
                self.controldir.root_transport.base)
 
864
                self.bzrdir.root_transport.base)
845
865
            new_repository = new_bzrdir.find_repository()
846
866
            if new_repository._fallback_repositories:
847
 
                raise AssertionError(
848
 
                    "didn't expect %r to have fallback_repositories"
 
867
                raise AssertionError("didn't expect %r to have "
 
868
                    "fallback_repositories"
849
869
                    % (self.repository,))
850
870
            # Replace self.repository with the new repository.
851
871
            # Do our best to transfer the lock state (i.e. lock-tokens and
878
898
            if old_lock_count == 0:
879
899
                raise AssertionError(
880
900
                    'old_repository should have been locked at least once.')
881
 
            for i in range(old_lock_count - 1):
 
901
            for i in range(old_lock_count-1):
882
902
                self.repository.lock_write()
883
903
            # Fetch from the old repository into the new.
884
 
            with old_repository.lock_read():
 
904
            old_repository.lock_read()
 
905
            try:
885
906
                # XXX: If you unstack a branch while it has a working tree
886
907
                # with a pending merge, the pending-merged revisions will no
887
908
                # longer be present.  You can (probably) revert and remerge.
889
910
                    tags_to_fetch = set(self.tags.get_reverse_tag_dict())
890
911
                except errors.TagsNotSupported:
891
912
                    tags_to_fetch = set()
892
 
                fetch_spec = vf_search.NotInOtherForRevs(
893
 
                    self.repository, old_repository,
894
 
                    required_ids=[self.last_revision()],
 
913
                fetch_spec = vf_search.NotInOtherForRevs(self.repository,
 
914
                    old_repository, required_ids=[self.last_revision()],
895
915
                    if_present_ids=tags_to_fetch, find_ghosts=True).execute()
896
916
                self.repository.fetch(old_repository, fetch_spec=fetch_spec)
 
917
            finally:
 
918
                old_repository.unlock()
 
919
        finally:
 
920
            pb.finished()
 
921
 
 
922
    def _set_tags_bytes(self, bytes):
 
923
        """Mirror method for _get_tags_bytes.
 
924
 
 
925
        :seealso: Branch._get_tags_bytes.
 
926
        """
 
927
        op = cleanup.OperationWithCleanups(self._set_tags_bytes_locked)
 
928
        op.add_cleanup(self.lock_write().unlock)
 
929
        return op.run_simple(bytes)
 
930
 
 
931
    def _set_tags_bytes_locked(self, bytes):
 
932
        self._tags_bytes = bytes
 
933
        return self._transport.put_bytes('tags', bytes)
897
934
 
898
935
    def _cache_revision_history(self, rev_history):
899
936
        """Set the cached revision history to rev_history.
930
967
        self._merge_sorted_revisions_cache = None
931
968
        self._partial_revision_history_cache = []
932
969
        self._partial_revision_id_to_revno_cache = {}
 
970
        self._tags_bytes = None
933
971
 
934
972
    def _gen_revision_history(self):
935
973
        """Return sequence of revision hashes on to this branch.
972
1010
        """Return last revision id, or NULL_REVISION."""
973
1011
        return self.last_revision_info()[1]
974
1012
 
 
1013
    @needs_read_lock
975
1014
    def last_revision_info(self):
976
1015
        """Return information about the last revision.
977
1016
 
978
1017
        :return: A tuple (revno, revision_id).
979
1018
        """
980
 
        with self.lock_read():
981
 
            if self._last_revision_info_cache is None:
982
 
                self._last_revision_info_cache = (
983
 
                    self._read_last_revision_info())
984
 
            return self._last_revision_info_cache
 
1019
        if self._last_revision_info_cache is None:
 
1020
            self._last_revision_info_cache = self._read_last_revision_info()
 
1021
        return self._last_revision_info_cache
985
1022
 
986
1023
    def _read_last_revision_info(self):
987
1024
        raise NotImplementedError(self._read_last_revision_info)
1017
1054
        except ValueError:
1018
1055
            raise errors.NoSuchRevision(self, revision_id)
1019
1056
 
 
1057
    @needs_read_lock
1020
1058
    def get_rev_id(self, revno, history=None):
1021
1059
        """Find the revision id of the specified revno."""
1022
 
        with self.lock_read():
1023
 
            if revno == 0:
1024
 
                return _mod_revision.NULL_REVISION
1025
 
            last_revno, last_revid = self.last_revision_info()
1026
 
            if revno == last_revno:
1027
 
                return last_revid
1028
 
            if revno <= 0 or revno > last_revno:
1029
 
                raise errors.NoSuchRevision(self, revno)
1030
 
            distance_from_last = last_revno - revno
1031
 
            if len(self._partial_revision_history_cache) <= distance_from_last:
1032
 
                self._extend_partial_history(distance_from_last)
1033
 
            return self._partial_revision_history_cache[distance_from_last]
 
1060
        if revno == 0:
 
1061
            return _mod_revision.NULL_REVISION
 
1062
        last_revno, last_revid = self.last_revision_info()
 
1063
        if revno == last_revno:
 
1064
            return last_revid
 
1065
        if revno <= 0 or revno > last_revno:
 
1066
            raise errors.NoSuchRevision(self, revno)
 
1067
        distance_from_last = last_revno - revno
 
1068
        if len(self._partial_revision_history_cache) <= distance_from_last:
 
1069
            self._extend_partial_history(distance_from_last)
 
1070
        return self._partial_revision_history_cache[distance_from_last]
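An assumed usage sketch of the revno/revision-id lookups shown above, for an opened branch `b`:

    from breezy import revision as _mod_revision
    last_revno, last_revid = b.last_revision_info()
    assert b.get_rev_id(last_revno) == last_revid
    assert b.get_rev_id(0) == _mod_revision.NULL_REVISION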
1034
1071
 
1035
1072
    def pull(self, source, overwrite=False, stop_revision=None,
1036
1073
             possible_transports=None, *args, **kwargs):
1040
1077
 
1041
1078
        :returns: PullResult instance
1042
1079
        """
1043
 
        return InterBranch.get(source, self).pull(
1044
 
            overwrite=overwrite, stop_revision=stop_revision,
 
1080
        return InterBranch.get(source, self).pull(overwrite=overwrite,
 
1081
            stop_revision=stop_revision,
1045
1082
            possible_transports=possible_transports, *args, **kwargs)
1046
1083
 
1047
1084
    def push(self, target, overwrite=False, stop_revision=None, lossy=False,
1048
 
             *args, **kwargs):
 
1085
            *args, **kwargs):
1049
1086
        """Mirror this branch into target.
1050
1087
 
1051
1088
        This branch is considered to be 'local', having low latency.
1052
1089
        """
1053
 
        return InterBranch.get(self, target).push(
1054
 
            overwrite, stop_revision, lossy, *args, **kwargs)
 
1090
        return InterBranch.get(self, target).push(overwrite, stop_revision,
 
1091
            lossy, *args, **kwargs)
1055
1092
 
1056
1093
    def basis_tree(self):
1057
1094
        """Return `Tree` object for last revision."""
1070
1107
        # This is an old-format absolute path to a local branch
1071
1108
        # turn it into a url
1072
1109
        if parent.startswith('/'):
1073
 
            parent = urlutils.local_path_to_url(parent)
 
1110
            parent = urlutils.local_path_to_url(parent.decode('utf8'))
1074
1111
        try:
1075
1112
            return urlutils.join(self.base[:-1], parent)
1076
 
        except urlutils.InvalidURLJoin:
 
1113
        except errors.InvalidURLJoin as e:
1077
1114
            raise errors.InaccessibleParent(parent, self.user_url)
1078
1115
 
1079
1116
    def _get_parent_location(self):
1165
1202
        for hook in hooks:
1166
1203
            hook(params)
1167
1204
 
 
1205
    @needs_write_lock
1168
1206
    def update(self):
1169
1207
        """Synchronise this branch with the master branch if any.
1170
1208
 
1188
1226
        if revno < 1 or revno > self.revno():
1189
1227
            raise errors.InvalidRevisionNumber(revno)
1190
1228
 
1191
 
    def clone(self, to_controldir, revision_id=None, name=None,
1192
 
              repository_policy=None, tag_selector=None):
1193
 
        """Clone this branch into to_controldir preserving all semantic values.
 
1229
    @needs_read_lock
 
1230
    def clone(self, to_bzrdir, revision_id=None, repository_policy=None):
 
1231
        """Clone this branch into to_bzrdir preserving all semantic values.
1194
1232
 
1195
1233
        Most API users will want 'create_clone_on_transport', which creates a
1196
1234
        new bzrdir and branch on the fly.
1198
1236
        revision_id: if not None, the revision history in the new branch will
1199
1237
                     be truncated to end with revision_id.
1200
1238
        """
1201
 
        result = to_controldir.create_branch(name=name)
1202
 
        with self.lock_read(), result.lock_write():
 
1239
        result = to_bzrdir.create_branch()
 
1240
        result.lock_write()
 
1241
        try:
1203
1242
            if repository_policy is not None:
1204
1243
                repository_policy.configure_branch(result)
1205
 
            self.copy_content_into(
1206
 
                result, revision_id=revision_id, tag_selector=tag_selector)
 
1244
            self.copy_content_into(result, revision_id=revision_id)
 
1245
        finally:
 
1246
            result.unlock()
1207
1247
        return result
1208
1248
 
1209
 
    def sprout(self, to_controldir, revision_id=None, repository_policy=None,
1210
 
               repository=None, lossy=False, tag_selector=None):
1211
 
        """Create a new line of development from the branch, into to_controldir.
 
1249
    @needs_read_lock
 
1250
    def sprout(self, to_bzrdir, revision_id=None, repository_policy=None,
 
1251
            repository=None):
 
1252
        """Create a new line of development from the branch, into to_bzrdir.
1212
1253
 
1213
 
        to_controldir controls the branch format.
 
1254
        to_bzrdir controls the branch format.
1214
1255
 
1215
1256
        revision_id: if not None, the revision history in the new branch will
1216
1257
                     be truncated to end with revision_id.
1217
1258
        """
1218
 
        if (repository_policy is not None
1219
 
                and repository_policy.requires_stacking()):
1220
 
            to_controldir._format.require_stacking(_skip_repo=True)
1221
 
        result = to_controldir.create_branch(repository=repository)
1222
 
        if lossy:
1223
 
            raise errors.LossyPushToSameVCS(self, result)
1224
 
        with self.lock_read(), result.lock_write():
 
1259
        if (repository_policy is not None and
 
1260
            repository_policy.requires_stacking()):
 
1261
            to_bzrdir._format.require_stacking(_skip_repo=True)
 
1262
        result = to_bzrdir.create_branch(repository=repository)
 
1263
        result.lock_write()
 
1264
        try:
1225
1265
            if repository_policy is not None:
1226
1266
                repository_policy.configure_branch(result)
1227
 
            self.copy_content_into(
1228
 
                result, revision_id=revision_id, tag_selector=tag_selector)
 
1267
            self.copy_content_into(result, revision_id=revision_id)
1229
1268
            master_url = self.get_bound_location()
1230
1269
            if master_url is None:
1231
 
                result.set_parent(self.user_url)
 
1270
                result.set_parent(self.bzrdir.root_transport.base)
1232
1271
            else:
1233
1272
                result.set_parent(master_url)
 
1273
        finally:
 
1274
            result.unlock()
1234
1275
        return result
1235
1276
 
1236
1277
    def _synchronize_history(self, destination, revision_id):
1251
1292
        else:
1252
1293
            graph = self.repository.get_graph()
1253
1294
            try:
1254
 
                revno = graph.find_distance_to_null(
1255
 
                    revision_id, [(source_revision_id, source_revno)])
 
1295
                revno = graph.find_distance_to_null(revision_id, 
 
1296
                    [(source_revision_id, source_revno)])
1256
1297
            except errors.GhostRevisionsHaveNoRevno:
1257
1298
                # Default to 1, if we can't find anything else
1258
1299
                revno = 1
1259
1300
        destination.set_last_revision_info(revno, revision_id)
1260
1301
 
1261
 
    def copy_content_into(self, destination, revision_id=None, tag_selector=None):
 
1302
    def copy_content_into(self, destination, revision_id=None):
1262
1303
        """Copy the content of self into destination.
1263
1304
 
1264
1305
        revision_id: if not None, the revision history in the new branch will
1265
1306
                     be truncated to end with revision_id.
1266
 
        tag_selector: Optional callback that receives a tag name
1267
 
            and should return a boolean to indicate whether a tag should be copied
1268
1307
        """
1269
1308
        return InterBranch.get(self, destination).copy_content_into(
1270
 
            revision_id=revision_id, tag_selector=tag_selector)
 
1309
            revision_id=revision_id)
1271
1310
 
1272
1311
    def update_references(self, target):
1273
 
        if not self._format.supports_reference_locations:
1274
 
            return
1275
 
        return InterBranch.get(self, target).update_references()
 
1312
        if not getattr(self._format, 'supports_reference_locations', False):
 
1313
            return
 
1314
        reference_dict = self._get_all_reference_info()
 
1315
        if len(reference_dict) == 0:
 
1316
            return
 
1317
        old_base = self.base
 
1318
        new_base = target.base
 
1319
        target_reference_dict = target._get_all_reference_info()
 
1320
        for file_id, (tree_path, branch_location) in viewitems(reference_dict):
 
1321
            branch_location = urlutils.rebase_url(branch_location,
 
1322
                                                  old_base, new_base)
 
1323
            target_reference_dict.setdefault(
 
1324
                file_id, (tree_path, branch_location))
 
1325
        target._set_all_reference_info(target_reference_dict)
1276
1326
 
 
1327
    @needs_read_lock
1277
1328
    def check(self, refs):
1278
1329
        """Check consistency of the branch.
1279
1330
 
1287
1338
            branch._get_check_refs()
1288
1339
        :return: A BranchCheckResult.
1289
1340
        """
1290
 
        with self.lock_read():
1291
 
            result = BranchCheckResult(self)
1292
 
            last_revno, last_revision_id = self.last_revision_info()
1293
 
            actual_revno = refs[('lefthand-distance', last_revision_id)]
1294
 
            if actual_revno != last_revno:
1295
 
                result.errors.append(errors.BzrCheckError(
1296
 
                    'revno does not match len(mainline) %s != %s' % (
1297
 
                        last_revno, actual_revno)))
1298
 
            # TODO: We should probably also check that self.revision_history
1299
 
            # matches the repository for older branch formats.
1300
 
            # If looking for the code that cross-checks repository parents
1301
 
            # against the Graph.iter_lefthand_ancestry output, that is now a
1302
 
            # repository specific check.
1303
 
            return result
 
1341
        result = BranchCheckResult(self)
 
1342
        last_revno, last_revision_id = self.last_revision_info()
 
1343
        actual_revno = refs[('lefthand-distance', last_revision_id)]
 
1344
        if actual_revno != last_revno:
 
1345
            result.errors.append(errors.BzrCheckError(
 
1346
                'revno does not match len(mainline) %s != %s' % (
 
1347
                last_revno, actual_revno)))
 
1348
        # TODO: We should probably also check that self.revision_history
 
1349
        # matches the repository for older branch formats.
 
1350
        # If looking for the code that cross-checks repository parents against
 
1351
        # the Graph.iter_lefthand_ancestry output, that is now a repository
 
1352
        # specific check.
 
1353
        return result
1304
1354
 
1305
1355
    def _get_checkout_format(self, lightweight=False):
1306
1356
        """Return the most suitable metadir for a checkout of this branch.
1307
1357
        Weaves are used if this branch's repository uses weaves.
1308
1358
        """
1309
 
        format = self.repository.controldir.checkout_metadir()
 
1359
        format = self.repository.bzrdir.checkout_metadir()
1310
1360
        format.set_branch_format(self._format)
1311
1361
        return format
1312
1362
 
1313
1363
    def create_clone_on_transport(self, to_transport, revision_id=None,
1314
 
                                  stacked_on=None, create_prefix=False,
1315
 
                                  use_existing_dir=False, no_tree=None,
1316
 
                                  tag_selector=None):
 
1364
        stacked_on=None, create_prefix=False, use_existing_dir=False,
 
1365
        no_tree=None):
1317
1366
        """Create a clone of this branch and its bzrdir.
1318
1367
 
1319
1368
        :param to_transport: The transport to clone onto.
1326
1375
        """
1327
1376
        # XXX: Fix the bzrdir API to allow getting the branch back from the
1328
1377
        # clone call. Or something. 20090224 RBC/spiv.
1329
 
        # XXX: Should this perhaps clone colocated branches as well,
 
1378
        # XXX: Should this perhaps clone colocated branches as well, 
1330
1379
        # rather than just the default branch? 20100319 JRV
1331
1380
        if revision_id is None:
1332
1381
            revision_id = self.last_revision()
1333
 
        dir_to = self.controldir.clone_on_transport(
1334
 
            to_transport, revision_id=revision_id, stacked_on=stacked_on,
 
1382
        dir_to = self.bzrdir.clone_on_transport(to_transport,
 
1383
            revision_id=revision_id, stacked_on=stacked_on,
1335
1384
            create_prefix=create_prefix, use_existing_dir=use_existing_dir,
1336
 
            no_tree=no_tree, tag_selector=tag_selector)
 
1385
            no_tree=no_tree)
1337
1386
        return dir_to.open_branch()
1338
1387
 
1339
1388
    def create_checkout(self, to_location, revision_id=None,
1340
1389
                        lightweight=False, accelerator_tree=None,
1341
 
                        hardlink=False, recurse_nested=True):
 
1390
                        hardlink=False):
1342
1391
        """Create a checkout of a branch.
1343
1392
 
1344
1393
        :param to_location: The url to produce the checkout at
1351
1400
            content is different.
1352
1401
        :param hardlink: If true, hard-link files from accelerator_tree,
1353
1402
            where possible.
1354
 
        :param recurse_nested: Whether to recurse into nested trees
1355
1403
        :return: The tree of the created checkout
1356
1404
        """
1357
1405
        t = transport.get_transport(to_location)
1369
1417
                pass
1370
1418
            else:
1371
1419
                raise errors.AlreadyControlDirError(t.base)
1372
 
            if (checkout.control_transport.base
1373
 
                    == self.controldir.control_transport.base):
 
1420
            if checkout.control_transport.base == self.bzrdir.control_transport.base:
1374
1421
                # When checking out to the same control directory,
1375
1422
                # always create a lightweight checkout
1376
1423
                lightweight = True
1379
1426
            from_branch = checkout.set_branch_reference(target_branch=self)
1380
1427
        else:
1381
1428
            policy = checkout.determine_repository_policy()
1382
 
            policy.acquire_repository()
 
1429
            repo = policy.acquire_repository()[0]
1383
1430
            checkout_branch = checkout.create_branch()
1384
1431
            checkout_branch.bind(self)
1385
1432
            # pull up to the specified revision_id to set the initial
1391
1438
                                           accelerator_tree=accelerator_tree,
1392
1439
                                           hardlink=hardlink)
1393
1440
        basis_tree = tree.basis_tree()
1394
 
        with basis_tree.lock_read():
1395
 
            for path in basis_tree.iter_references():
1396
 
                reference_parent = tree.reference_parent(path)
1397
 
                if reference_parent is None:
1398
 
                    warning('Branch location for %s unknown.', path)
1399
 
                    continue
1400
 
                reference_parent.create_checkout(
1401
 
                    tree.abspath(path),
1402
 
                    basis_tree.get_reference_revision(path), lightweight)
 
1441
        basis_tree.lock_read()
 
1442
        try:
 
1443
            for path, file_id in basis_tree.iter_references():
 
1444
                reference_parent = self.reference_parent(file_id, path)
 
1445
                reference_parent.create_checkout(tree.abspath(path),
 
1446
                    basis_tree.get_reference_revision(file_id, path),
 
1447
                    lightweight)
 
1448
        finally:
 
1449
            basis_tree.unlock()
1403
1450
        return tree
1404
1451
 
 
1452
    @needs_write_lock
1405
1453
    def reconcile(self, thorough=True):
1406
 
        """Make sure the data stored in this branch is consistent.
1407
 
 
1408
 
        :return: A `ReconcileResult` object.
 
1454
        """Make sure the data stored in this branch is consistent."""
 
1455
        from breezy.reconcile import BranchReconciler
 
1456
        reconciler = BranchReconciler(self, thorough=thorough)
 
1457
        reconciler.reconcile()
 
1458
        return reconciler
 
1459
 
 
1460
    def reference_parent(self, file_id, path, possible_transports=None):
 
1461
        """Return the parent branch for a tree-reference file_id
 
1462
 
 
1463
        :param file_id: The file_id of the tree reference
 
1464
        :param path: The path of the file_id in the tree
 
1465
        :return: A branch associated with the file_id
1409
1466
        """
1410
 
        raise NotImplementedError(self.reconcile)
 
1467
        # FIXME should provide multiple branches, based on config
 
1468
        return Branch.open(self.bzrdir.root_transport.clone(path).base,
 
1469
                           possible_transports=possible_transports)
1411
1470
 
1412
1471
    def supports_tags(self):
1413
1472
        return self._format.supports_tags()
1481
1540
        if_present_fetch.discard(_mod_revision.NULL_REVISION)
1482
1541
        return must_fetch, if_present_fetch
1483
1542
 
1484
 
    def create_memorytree(self):
1485
 
        """Create a memory tree for this branch.
1486
 
 
1487
 
        :return: An in-memory MutableTree instance
1488
 
        """
1489
 
        return memorytree.MemoryTree.create_on_branch(self)
1490
 
 
1491
1543
 
1492
1544
class BranchFormat(controldir.ControlComponentFormat):
1493
1545
    """An encapsulation of the initialization and open routines for a format.
1594
1646
        raise NotImplementedError(self.network_name)
1595
1647
 
1596
1648
    def open(self, controldir, name=None, _found=False, ignore_fallbacks=False,
1597
 
             found_repository=None, possible_transports=None):
 
1649
            found_repository=None, possible_transports=None):
1598
1650
        """Return the branch object for controldir.
1599
1651
 
1600
1652
        :param controldir: A ControlDir that contains a branch.
1616
1668
 
1617
1669
    def supports_leaving_lock(self):
1618
1670
        """True if this format supports leaving locks in place."""
1619
 
        return False  # by default
 
1671
        return False # by default
1620
1672
 
1621
1673
    def __str__(self):
1622
1674
        return self.get_format_description().rstrip()
1633
1685
        """True if tags can reference ghost revisions."""
1634
1686
        return True
1635
1687
 
1636
 
    def supports_store_uncommitted(self):
1637
 
        """True if uncommitted changes can be stored in this branch."""
1638
 
        return True
1639
 
 
1640
 
    def stores_revno(self):
1641
 
        """True if this branch format store revision numbers."""
1642
 
        return True
1643
 
 
1644
1688
 
1645
1689
class BranchHooks(Hooks):
1646
1690
    """A dictionary mapping hook name to a list of callables for branch hooks.
1656
1700
        notified.
1657
1701
        """
1658
1702
        Hooks.__init__(self, "breezy.branch", "Branch.hooks")
1659
 
        self.add_hook(
1660
 
            'open',
 
1703
        self.add_hook('open',
1661
1704
            "Called with the Branch object that has been opened after a "
1662
1705
            "branch is opened.", (1, 8))
1663
 
        self.add_hook(
1664
 
            'post_push',
 
1706
        self.add_hook('post_push',
1665
1707
            "Called after a push operation completes. post_push is called "
1666
 
            "with a breezy.branch.BranchPushResult object and only runs in "
1667
 
            "the bzr client.", (0, 15))
1668
 
        self.add_hook(
1669
 
            'post_pull',
 
1708
            "with a breezy.branch.BranchPushResult object and only runs in the "
 
1709
            "bzr client.", (0, 15))
 
1710
        self.add_hook('post_pull',
1670
1711
            "Called after a pull operation completes. post_pull is called "
1671
1712
            "with a breezy.branch.PullResult object and only runs in the "
1672
1713
            "bzr client.", (0, 15))
1673
 
        self.add_hook(
1674
 
            'pre_commit',
 
1714
        self.add_hook('pre_commit',
1675
1715
            "Called after a commit is calculated but before it is "
1676
1716
            "completed. pre_commit is called with (local, master, old_revno, "
1677
1717
            "old_revid, future_revno, future_revid, tree_delta, future_tree"
1680
1720
            "basis revision. hooks MUST NOT modify this delta. "
1681
1721
            " future_tree is an in-memory tree obtained from "
1682
1722
            "CommitBuilder.revision_tree() and hooks MUST NOT modify this "
1683
 
            "tree.", (0, 91))
1684
 
        self.add_hook(
1685
 
            'post_commit',
 
1723
            "tree.", (0,91))
 
1724
        self.add_hook('post_commit',
1686
1725
            "Called in the bzr client after a commit has completed. "
1687
1726
            "post_commit is called with (local, master, old_revno, old_revid, "
1688
1727
            "new_revno, new_revid). old_revid is NULL_REVISION for the first "
1689
1728
            "commit to a branch.", (0, 15))
1690
 
        self.add_hook(
1691
 
            'post_uncommit',
 
1729
        self.add_hook('post_uncommit',
1692
1730
            "Called in the bzr client after an uncommit completes. "
1693
1731
            "post_uncommit is called with (local, master, old_revno, "
1694
1732
            "old_revid, new_revno, new_revid) where local is the local branch "
1695
1733
            "or None, master is the target branch, and an empty branch "
1696
1734
            "receives new_revno of 0, new_revid of None.", (0, 15))
1697
 
        self.add_hook(
1698
 
            'pre_change_branch_tip',
 
1735
        self.add_hook('pre_change_branch_tip',
1699
1736
            "Called in bzr client and server before a change to the tip of a "
1700
1737
            "branch is made. pre_change_branch_tip is called with a "
1701
1738
            "breezy.branch.ChangeBranchTipParams. Note that push, pull, "
1702
1739
            "commit, uncommit will all trigger this hook.", (1, 6))
1703
 
        self.add_hook(
1704
 
            'post_change_branch_tip',
 
1740
        self.add_hook('post_change_branch_tip',
1705
1741
            "Called in bzr client and server after a change to the tip of a "
1706
1742
            "branch is made. post_change_branch_tip is called with a "
1707
1743
            "breezy.branch.ChangeBranchTipParams. Note that push, pull, "
1708
1744
            "commit, uncommit will all trigger this hook.", (1, 4))
1709
 
        self.add_hook(
1710
 
            'transform_fallback_location',
 
1745
        self.add_hook('transform_fallback_location',
1711
1746
            "Called when a stacked branch is activating its fallback "
1712
1747
            "locations. transform_fallback_location is called with (branch, "
1713
1748
            "url), and should return a new url. Returning the same url "
1719
1754
            "multiple hooks installed for transform_fallback_location, "
1720
1755
            "all are called with the url returned from the previous hook."
1721
1756
            "The order is however undefined.", (1, 9))
1722
 
        self.add_hook(
1723
 
            'automatic_tag_name',
 
1757
        self.add_hook('automatic_tag_name',
1724
1758
            "Called to determine an automatic tag name for a revision. "
1725
1759
            "automatic_tag_name is called with (branch, revision_id) and "
1726
1760
            "should return a tag name or None if no tag name could be "
1727
1761
            "determined. The first non-None tag name returned will be used.",
1728
1762
            (2, 2))
1729
 
        self.add_hook(
1730
 
            'post_branch_init',
 
1763
        self.add_hook('post_branch_init',
1731
1764
            "Called after new branch initialization completes. "
1732
1765
            "post_branch_init is called with a "
1733
1766
            "breezy.branch.BranchInitHookParams. "
1734
1767
            "Note that init, branch and checkout (both heavyweight and "
1735
1768
            "lightweight) will all trigger this hook.", (2, 2))
1736
 
        self.add_hook(
1737
 
            'post_switch',
 
1769
        self.add_hook('post_switch',
1738
1770
            "Called after a checkout switches branch. "
1739
1771
            "post_switch is called with a "
1740
1772
            "breezy.branch.SwitchHookParams.", (2, 2))
1741
1773
 
1742
1774
 
 
1775
 
1743
1776
# install the default hooks into the Branch class.
1744
1777
Branch.hooks = BranchHooks()
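
The hook points defined above are what plugins attach to through Branch.hooks.
A hedged sketch of installing a 'post_push' hook via install_named_hook (from
the Hooks base class); the report_push callable is a hypothetical example:

from breezy.branch import Branch

def report_push(push_result):
    # post_push is documented above as receiving a BranchPushResult; its
    # source_branch and target_branch attributes are filled in by _basic_push.
    print("pushed %s -> %s" % (push_result.source_branch.base,
                               push_result.target_branch.base))

Branch.hooks.install_named_hook('post_push', report_push, 'push reporter')
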
1745
1778
 
1811
1844
        in branch, which refer to the original branch.
1812
1845
        """
1813
1846
        self.format = format
1814
 
        self.controldir = controldir
 
1847
        self.bzrdir = controldir
1815
1848
        self.name = name
1816
1849
        self.branch = branch
1817
1850
 
1850
1883
        return self.__dict__ == other.__dict__
1851
1884
 
1852
1885
    def __repr__(self):
1853
 
        return "<%s for %s to (%s, %s)>" % (
1854
 
            self.__class__.__name__, self.control_dir, self.to_branch,
 
1886
        return "<%s for %s to (%s, %s)>" % (self.__class__.__name__,
 
1887
            self.control_dir, self.to_branch,
1855
1888
            self.revision_id)
1856
1889
 
1857
1890
 
1865
1898
 
1866
1899
    def get_default(self):
1867
1900
        """Return the current default format."""
1868
 
        if (self._default_format_key is not None
1869
 
                and self._default_format is None):
 
1901
        if (self._default_format_key is not None and
 
1902
            self._default_format is None):
1870
1903
            self._default_format = self.get(self._default_format_key)
1871
1904
        return self._default_format
1872
1905
 
1895
1928
# formats which have no format string are not discoverable
1896
1929
# and not independently creatable, so are not registered.
1897
1930
format_registry.register_lazy(
1898
 
    b"Bazaar-NG branch format 5\n", "breezy.bzr.fullhistory",
 
1931
    "Bazaar-NG branch format 5\n", "breezy.bzr.fullhistory",
1899
1932
    "BzrBranchFormat5")
1900
1933
format_registry.register_lazy(
1901
 
    b"Bazaar Branch Format 6 (bzr 0.15)\n",
 
1934
    "Bazaar Branch Format 6 (bzr 0.15)\n",
1902
1935
    "breezy.bzr.branch", "BzrBranchFormat6")
1903
1936
format_registry.register_lazy(
1904
 
    b"Bazaar Branch Format 7 (needs bzr 1.6)\n",
 
1937
    "Bazaar Branch Format 7 (needs bzr 1.6)\n",
1905
1938
    "breezy.bzr.branch", "BzrBranchFormat7")
1906
1939
format_registry.register_lazy(
1907
 
    b"Bazaar Branch Format 8 (needs bzr 1.15)\n",
 
1940
    "Bazaar Branch Format 8 (needs bzr 1.15)\n",
1908
1941
    "breezy.bzr.branch", "BzrBranchFormat8")
1909
1942
format_registry.register_lazy(
1910
 
    b"Bazaar-NG Branch Reference Format 1\n",
 
1943
    "Bazaar-NG Branch Reference Format 1\n",
1911
1944
    "breezy.bzr.branch", "BranchReferenceFormat")
1912
1945
 
1913
 
format_registry.set_default_key(b"Bazaar Branch Format 7 (needs bzr 1.6)\n")
 
1946
format_registry.set_default_key("Bazaar Branch Format 7 (needs bzr 1.6)\n")
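
A short usage sketch for the registry populated above; note that the new code
keys formats by byte strings, so explicit lookups must pass bytes as well:

from breezy.branch import format_registry

default_fmt = format_registry.get_default()  # BzrBranchFormat7, per set_default_key
fmt8 = format_registry.get(b"Bazaar Branch Format 8 (needs bzr 1.15)\n")
print(default_fmt.get_format_description())
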
1914
1947
 
1915
1948
 
1916
1949
class BranchWriteLockResult(LogicalLockResult):
1917
1950
    """The result of write locking a branch.
1918
1951
 
1919
 
    :ivar token: The token obtained from the underlying branch lock, or
 
1952
    :ivar branch_token: The token obtained from the underlying branch lock, or
1920
1953
        None.
1921
1954
    :ivar unlock: A callable which will unlock the lock.
1922
1955
    """
1923
1956
 
 
1957
    def __init__(self, unlock, branch_token):
 
1958
        LogicalLockResult.__init__(self, unlock)
 
1959
        self.branch_token = branch_token
 
1960
 
1924
1961
    def __repr__(self):
1925
 
        return "BranchWriteLockResult(%r, %r)" % (self.unlock, self.token)
 
1962
        return "BranchWriteLockResult(%s, %s)" % (self.branch_token,
 
1963
            self.unlock)
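
BranchWriteLockResult is what Branch.lock_write() hands back. A hedged sketch
of the two usual patterns, assuming some_branch is an open Branch; the token
attribute is the one documented above and may be None:

result = some_branch.lock_write()
try:
    pass  # mutate the branch; result.token can be forwarded to callers
          # that need the underlying branch lock token
finally:
    result.unlock()

# The new code in this diff prefers the context-manager form:
with some_branch.lock_write():
    pass
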
1926
1964
 
1927
1965
 
1928
1966
######################################################################
1999
2037
        tag_updates = getattr(self, "tag_updates", None)
2000
2038
        if not is_quiet():
2001
2039
            if self.old_revid != self.new_revid:
2002
 
                if self.new_revno is not None:
2003
 
                    note(gettext('Pushed up to revision %d.'),
2004
 
                         self.new_revno)
2005
 
                else:
2006
 
                    note(gettext('Pushed up to revision id %s.'),
2007
 
                         self.new_revid.decode('utf-8'))
 
2040
                note(gettext('Pushed up to revision %d.') % self.new_revno)
2008
2041
            if tag_updates:
2009
 
                note(ngettext('%d tag updated.', '%d tags updated.',
2010
 
                              len(tag_updates)) % len(tag_updates))
 
2042
                note(ngettext('%d tag updated.', '%d tags updated.', len(tag_updates)) % len(tag_updates))
2011
2043
            if self.old_revid == self.new_revid and not tag_updates:
2012
2044
                if not tag_conflicts:
2013
2045
                    note(gettext('No new revisions or tags to push.'))
2033
2065
            if any.
2034
2066
        """
2035
2067
        note(gettext('checked branch {0} format {1}').format(
2036
 
            self.branch.user_url, self.branch._format))
 
2068
                                self.branch.user_url, self.branch._format))
2037
2069
        for error in self.errors:
2038
2070
            note(gettext('found error:%s'), error)
2039
2071
 
2052
2084
    @classmethod
2053
2085
    def _get_branch_formats_to_test(klass):
2054
2086
        """Return an iterable of format tuples for testing.
2055
 
 
 
2087
        
2056
2088
        :return: An iterable of (from_format, to_format) to use when testing
2057
2089
            this InterBranch class. Each InterBranch class should define this
2058
2090
            method itself.
2059
2091
        """
2060
2092
        raise NotImplementedError(klass._get_branch_formats_to_test)
2061
2093
 
 
2094
    @needs_write_lock
2062
2095
    def pull(self, overwrite=False, stop_revision=None,
2063
 
             possible_transports=None, local=False, tag_selector=None):
 
2096
             possible_transports=None, local=False):
2064
2097
        """Mirror source into target branch.
2065
2098
 
2066
2099
        The target branch is considered to be 'local', having low latency.
2069
2102
        """
2070
2103
        raise NotImplementedError(self.pull)
2071
2104
 
 
2105
    @needs_write_lock
2072
2106
    def push(self, overwrite=False, stop_revision=None, lossy=False,
2073
 
             _override_hook_source_branch=None, tag_selector=None):
 
2107
             _override_hook_source_branch=None):
2074
2108
        """Mirror the source branch into the target branch.
2075
2109
 
2076
2110
        The source branch is considered to be 'local', having low latency.
2077
2111
        """
2078
2112
        raise NotImplementedError(self.push)
2079
2113
 
2080
 
    def copy_content_into(self, revision_id=None, tag_selector=None):
 
2114
    @needs_write_lock
 
2115
    def copy_content_into(self, revision_id=None):
2081
2116
        """Copy the content of source into target
2082
2117
 
2083
 
        :param revision_id:
2084
 
            if not None, the revision history in the new branch will
2085
 
            be truncated to end with revision_id.
2086
 
        :param tag_selector: Optional callback that decides whether
2087
 
            each tag should be copied.
 
2118
        revision_id: if not None, the revision history in the new branch will
 
2119
                     be truncated to end with revision_id.
2088
2120
        """
2089
2121
        raise NotImplementedError(self.copy_content_into)
2090
2122
 
2091
 
    def fetch(self, stop_revision=None, limit=None, lossy=False):
 
2123
    @needs_write_lock
 
2124
    def fetch(self, stop_revision=None, limit=None):
2092
2125
        """Fetch revisions.
2093
2126
 
2094
2127
        :param stop_revision: Last revision to fetch
2095
2128
        :param limit: Optional rough limit of revisions to fetch
2096
 
        :return: FetchResult object
2097
2129
        """
2098
2130
        raise NotImplementedError(self.fetch)
2099
2131
 
2100
 
    def update_references(self):
2101
 
        """Import reference information from source to target.
2102
 
        """
2103
 
        raise NotImplementedError(self.update_references)
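
A sketch of how this optimiser interface is typically driven from the outside;
the locations are placeholders, and the tag_selector callback is assumed to be
called with a tag name, matching the selector argument threaded into merge_to
by the generic implementation below:

from breezy.branch import Branch, InterBranch

source = Branch.open('file:///path/to/source')   # placeholder URLs
target = Branch.open('file:///path/to/target')
inter = InterBranch.get(source, target)
result = inter.pull(tag_selector=lambda name: not name.startswith('tmp-'))
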
2104
 
 
2105
2132
 
2106
2133
def _fix_overwrite_type(overwrite):
2107
2134
    if isinstance(overwrite, bool):
2131
2158
            return format._custom_format
2132
2159
        return format
2133
2160
 
2134
 
    def copy_content_into(self, revision_id=None, tag_selector=None):
 
2161
    @needs_write_lock
 
2162
    def copy_content_into(self, revision_id=None):
2135
2163
        """Copy the content of source into target
2136
2164
 
2137
2165
        revision_id: if not None, the revision history in the new branch will
2138
2166
                     be truncated to end with revision_id.
2139
2167
        """
2140
 
        with self.source.lock_read(), self.target.lock_write():
2141
 
            self.source._synchronize_history(self.target, revision_id)
2142
 
            self.update_references()
2143
 
            try:
2144
 
                parent = self.source.get_parent()
2145
 
            except errors.InaccessibleParent as e:
2146
 
                mutter('parent was not accessible to copy: %s', str(e))
2147
 
            else:
2148
 
                if parent:
2149
 
                    self.target.set_parent(parent)
2150
 
            if self.source._push_should_merge_tags():
2151
 
                self.source.tags.merge_to(self.target.tags, selector=tag_selector)
 
2168
        self.source.update_references(self.target)
 
2169
        self.source._synchronize_history(self.target, revision_id)
 
2170
        try:
 
2171
            parent = self.source.get_parent()
 
2172
        except errors.InaccessibleParent as e:
 
2173
            mutter('parent was not accessible to copy: %s', e)
 
2174
        else:
 
2175
            if parent:
 
2176
                self.target.set_parent(parent)
 
2177
        if self.source._push_should_merge_tags():
 
2178
            self.source.tags.merge_to(self.target.tags)
2152
2179
 
2153
 
    def fetch(self, stop_revision=None, limit=None, lossy=False):
 
2180
    @needs_write_lock
 
2181
    def fetch(self, stop_revision=None, limit=None):
2154
2182
        if self.target.base == self.source.base:
2155
2183
            return (0, [])
2156
 
        with self.source.lock_read(), self.target.lock_write():
 
2184
        self.source.lock_read()
 
2185
        try:
2157
2186
            fetch_spec_factory = fetch.FetchSpecFactory()
2158
2187
            fetch_spec_factory.source_branch = self.source
2159
2188
            fetch_spec_factory.source_branch_stop_revision_id = stop_revision
2160
2189
            fetch_spec_factory.source_repo = self.source.repository
2161
2190
            fetch_spec_factory.target_repo = self.target.repository
2162
 
            fetch_spec_factory.target_repo_kind = (
2163
 
                fetch.TargetRepoKinds.PREEXISTING)
 
2191
            fetch_spec_factory.target_repo_kind = fetch.TargetRepoKinds.PREEXISTING
2164
2192
            fetch_spec_factory.limit = limit
2165
2193
            fetch_spec = fetch_spec_factory.make_fetch_spec()
2166
 
            return self.target.repository.fetch(
2167
 
                self.source.repository,
2168
 
                lossy=lossy,
 
2194
            return self.target.repository.fetch(self.source.repository,
2169
2195
                fetch_spec=fetch_spec)
 
2196
        finally:
 
2197
            self.source.unlock()
2170
2198
 
 
2199
    @needs_write_lock
2171
2200
    def _update_revisions(self, stop_revision=None, overwrite=False,
2172
 
                          graph=None):
2173
 
        with self.source.lock_read(), self.target.lock_write():
2174
 
            other_revno, other_last_revision = self.source.last_revision_info()
2175
 
            stop_revno = None  # unknown
2176
 
            if stop_revision is None:
2177
 
                stop_revision = other_last_revision
2178
 
                if _mod_revision.is_null(stop_revision):
2179
 
                    # if there are no commits, we're done.
2180
 
                    return
2181
 
                stop_revno = other_revno
 
2201
            graph=None):
 
2202
        other_revno, other_last_revision = self.source.last_revision_info()
 
2203
        stop_revno = None # unknown
 
2204
        if stop_revision is None:
 
2205
            stop_revision = other_last_revision
 
2206
            if _mod_revision.is_null(stop_revision):
 
2207
                # if there are no commits, we're done.
 
2208
                return
 
2209
            stop_revno = other_revno
2182
2210
 
2183
 
            # what's the current last revision, before we fetch [and change it
2184
 
            # possibly]
2185
 
            last_rev = _mod_revision.ensure_null(self.target.last_revision())
2186
 
            # we fetch here so that we don't process data twice in the common
2187
 
            # case of having something to pull, and so that the check for
2188
 
            # already merged can operate on the just fetched graph, which will
2189
 
            # be cached in memory.
2190
 
            self.fetch(stop_revision=stop_revision)
2191
 
            # Check to see if one is an ancestor of the other
2192
 
            if not overwrite:
2193
 
                if graph is None:
2194
 
                    graph = self.target.repository.get_graph()
2195
 
                if self.target._check_if_descendant_or_diverged(
2196
 
                        stop_revision, last_rev, graph, self.source):
2197
 
                    # stop_revision is a descendant of last_rev, but we aren't
2198
 
                    # overwriting, so we're done.
2199
 
                    return
2200
 
            if stop_revno is None:
2201
 
                if graph is None:
2202
 
                    graph = self.target.repository.get_graph()
2203
 
                this_revno, this_last_revision = \
 
2211
        # what's the current last revision, before we fetch [and change it
 
2212
        # possibly]
 
2213
        last_rev = _mod_revision.ensure_null(self.target.last_revision())
 
2214
        # we fetch here so that we don't process data twice in the common
 
2215
        # case of having something to pull, and so that the check for
 
2216
        # already merged can operate on the just fetched graph, which will
 
2217
        # be cached in memory.
 
2218
        self.fetch(stop_revision=stop_revision)
 
2219
        # Check to see if one is an ancestor of the other
 
2220
        if not overwrite:
 
2221
            if graph is None:
 
2222
                graph = self.target.repository.get_graph()
 
2223
            if self.target._check_if_descendant_or_diverged(
 
2224
                    stop_revision, last_rev, graph, self.source):
 
2225
                # stop_revision is a descendant of last_rev, but we aren't
 
2226
                # overwriting, so we're done.
 
2227
                return
 
2228
        if stop_revno is None:
 
2229
            if graph is None:
 
2230
                graph = self.target.repository.get_graph()
 
2231
            this_revno, this_last_revision = \
2204
2232
                    self.target.last_revision_info()
2205
 
                stop_revno = graph.find_distance_to_null(
2206
 
                    stop_revision, [(other_last_revision, other_revno),
2207
 
                                    (this_last_revision, this_revno)])
2208
 
            self.target.set_last_revision_info(stop_revno, stop_revision)
 
2233
            stop_revno = graph.find_distance_to_null(stop_revision,
 
2234
                            [(other_last_revision, other_revno),
 
2235
                             (this_last_revision, this_revno)])
 
2236
        self.target.set_last_revision_info(stop_revno, stop_revision)
2209
2237
 
 
2238
    @needs_write_lock
2210
2239
    def pull(self, overwrite=False, stop_revision=None,
2211
2240
             possible_transports=None, run_hooks=True,
2212
 
             _override_hook_target=None, local=False,
2213
 
             tag_selector=None):
 
2241
             _override_hook_target=None, local=False):
2214
2242
        """Pull from source into self, updating my master if any.
2215
2243
 
2216
2244
        :param run_hooks: Private parameter - if false, this branch
2217
2245
            is being called because it's the master of the primary branch,
2218
2246
            so it should not run its hooks.
2219
2247
        """
2220
 
        with contextlib.ExitStack() as exit_stack:
2221
 
            exit_stack.enter_context(self.target.lock_write())
2222
 
            bound_location = self.target.get_bound_location()
2223
 
            if local and not bound_location:
2224
 
                raise errors.LocalRequiresBoundBranch()
2225
 
            master_branch = None
2226
 
            source_is_master = False
2227
 
            if bound_location:
2228
 
                # bound_location comes from a config file, some care has to be
2229
 
                # taken to relate it to source.user_url
2230
 
                normalized = urlutils.normalize_url(bound_location)
2231
 
                try:
2232
 
                    relpath = self.source.user_transport.relpath(normalized)
2233
 
                    source_is_master = (relpath == '')
2234
 
                except (errors.PathNotChild, urlutils.InvalidURL):
2235
 
                    source_is_master = False
2236
 
            if not local and bound_location and not source_is_master:
2237
 
                # not pulling from master, so we need to update master.
2238
 
                master_branch = self.target.get_master_branch(
2239
 
                    possible_transports)
2240
 
                exit_stack.enter_context(master_branch.lock_write())
 
2248
        bound_location = self.target.get_bound_location()
 
2249
        if local and not bound_location:
 
2250
            raise errors.LocalRequiresBoundBranch()
 
2251
        master_branch = None
 
2252
        source_is_master = False
 
2253
        if bound_location:
 
2254
            # bound_location comes from a config file, some care has to be
 
2255
            # taken to relate it to source.user_url
 
2256
            normalized = urlutils.normalize_url(bound_location)
 
2257
            try:
 
2258
                relpath = self.source.user_transport.relpath(normalized)
 
2259
                source_is_master = (relpath == '')
 
2260
            except (errors.PathNotChild, errors.InvalidURL):
 
2261
                source_is_master = False
 
2262
        if not local and bound_location and not source_is_master:
 
2263
            # not pulling from master, so we need to update master.
 
2264
            master_branch = self.target.get_master_branch(possible_transports)
 
2265
            master_branch.lock_write()
 
2266
        try:
2241
2267
            if master_branch:
2242
2268
                # pull from source into master.
2243
 
                master_branch.pull(
2244
 
                    self.source, overwrite, stop_revision, run_hooks=False,
2245
 
                    tag_selector=tag_selector)
2246
 
            return self._pull(
2247
 
                overwrite, stop_revision, _hook_master=master_branch,
 
2269
                master_branch.pull(self.source, overwrite, stop_revision,
 
2270
                    run_hooks=False)
 
2271
            return self._pull(overwrite,
 
2272
                stop_revision, _hook_master=master_branch,
2248
2273
                run_hooks=run_hooks,
2249
2274
                _override_hook_target=_override_hook_target,
2250
 
                merge_tags_to_master=not source_is_master,
2251
 
                tag_selector=tag_selector)
 
2275
                merge_tags_to_master=not source_is_master)
 
2276
        finally:
 
2277
            if master_branch:
 
2278
                master_branch.unlock()
2252
2279
 
2253
2280
    def push(self, overwrite=False, stop_revision=None, lossy=False,
2254
 
             _override_hook_source_branch=None, tag_selector=None):
 
2281
             _override_hook_source_branch=None):
2255
2282
        """See InterBranch.push.
2256
2283
 
2257
2284
        This is the basic concrete implementation of push()
2266
2293
        # TODO: Public option to disable running hooks - should be trivial but
2267
2294
        # needs tests.
2268
2295
 
2269
 
        def _run_hooks():
2270
 
            if _override_hook_source_branch:
2271
 
                result.source_branch = _override_hook_source_branch
2272
 
            for hook in Branch.hooks['post_push']:
2273
 
                hook(result)
2274
 
 
2275
 
        with self.source.lock_read(), self.target.lock_write():
2276
 
            bound_location = self.target.get_bound_location()
2277
 
            if bound_location and self.target.base != bound_location:
2278
 
                # there is a master branch.
2279
 
                #
2280
 
                # XXX: Why the second check?  Is it even supported for a branch
2281
 
                # to be bound to itself? -- mbp 20070507
2282
 
                master_branch = self.target.get_master_branch()
2283
 
                with master_branch.lock_write():
2284
 
                    # push into the master from the source branch.
2285
 
                    master_inter = InterBranch.get(self.source, master_branch)
2286
 
                    master_inter._basic_push(
2287
 
                        overwrite, stop_revision, tag_selector=tag_selector)
2288
 
                    # and push into the target branch from the source. Note
2289
 
                    # that we push from the source branch again, because it's
2290
 
                    # considered the highest bandwidth repository.
2291
 
                    result = self._basic_push(
2292
 
                        overwrite, stop_revision, tag_selector=tag_selector)
2293
 
                    result.master_branch = master_branch
2294
 
                    result.local_branch = self.target
2295
 
                    _run_hooks()
2296
 
            else:
2297
 
                master_branch = None
2298
 
                # no master branch
2299
 
                result = self._basic_push(
2300
 
                    overwrite, stop_revision, tag_selector=tag_selector)
2301
 
                # TODO: Why set master_branch and local_branch if there's no
2302
 
                # binding?  Maybe cleaner to just leave them unset? -- mbp
2303
 
                # 20070504
2304
 
                result.master_branch = self.target
2305
 
                result.local_branch = None
2306
 
                _run_hooks()
2307
 
            return result
2308
 
 
2309
 
    def _basic_push(self, overwrite, stop_revision, tag_selector=None):
 
2296
        op = cleanup.OperationWithCleanups(self._push_with_bound_branches)
 
2297
        op.add_cleanup(self.source.lock_read().unlock)
 
2298
        op.add_cleanup(self.target.lock_write().unlock)
 
2299
        return op.run(overwrite, stop_revision,
 
2300
            _override_hook_source_branch=_override_hook_source_branch)
 
2301
 
 
2302
    def _basic_push(self, overwrite, stop_revision):
2310
2303
        """Basic implementation of push without bound branches or hooks.
2311
2304
 
2312
2305
        Must be called with source read locked and target write locked.
2315
2308
        result.source_branch = self.source
2316
2309
        result.target_branch = self.target
2317
2310
        result.old_revno, result.old_revid = self.target.last_revision_info()
 
2311
        self.source.update_references(self.target)
2318
2312
        overwrite = _fix_overwrite_type(overwrite)
2319
2313
        if result.old_revid != stop_revision:
2320
2314
            # We assume that during 'push' this repository is closer than
2321
2315
            # the target.
2322
2316
            graph = self.source.repository.get_graph(self.target.repository)
2323
 
            self._update_revisions(
2324
 
                stop_revision, overwrite=("history" in overwrite), graph=graph)
 
2317
            self._update_revisions(stop_revision,
 
2318
                overwrite=("history" in overwrite),
 
2319
                graph=graph)
2325
2320
        if self.source._push_should_merge_tags():
2326
2321
            result.tag_updates, result.tag_conflicts = (
2327
2322
                self.source.tags.merge_to(
2328
 
                    self.target.tags, "tags" in overwrite, selector=tag_selector))
2329
 
        self.update_references()
 
2323
                self.target.tags, "tags" in overwrite))
2330
2324
        result.new_revno, result.new_revid = self.target.last_revision_info()
2331
2325
        return result
2332
2326
 
 
2327
    def _push_with_bound_branches(self, operation, overwrite, stop_revision,
 
2328
            _override_hook_source_branch=None):
 
2329
        """Push from source into target, and into target's master if any.
 
2330
        """
 
2331
        def _run_hooks():
 
2332
            if _override_hook_source_branch:
 
2333
                result.source_branch = _override_hook_source_branch
 
2334
            for hook in Branch.hooks['post_push']:
 
2335
                hook(result)
 
2336
 
 
2337
        bound_location = self.target.get_bound_location()
 
2338
        if bound_location and self.target.base != bound_location:
 
2339
            # there is a master branch.
 
2340
            #
 
2341
            # XXX: Why the second check?  Is it even supported for a branch to
 
2342
            # be bound to itself? -- mbp 20070507
 
2343
            master_branch = self.target.get_master_branch()
 
2344
            master_branch.lock_write()
 
2345
            operation.add_cleanup(master_branch.unlock)
 
2346
            # push into the master from the source branch.
 
2347
            master_inter = InterBranch.get(self.source, master_branch)
 
2348
            master_inter._basic_push(overwrite, stop_revision)
 
2349
            # and push into the target branch from the source. Note that
 
2350
            # we push from the source branch again, because it's considered
 
2351
            # the highest bandwidth repository.
 
2352
            result = self._basic_push(overwrite, stop_revision)
 
2353
            result.master_branch = master_branch
 
2354
            result.local_branch = self.target
 
2355
        else:
 
2356
            master_branch = None
 
2357
            # no master branch
 
2358
            result = self._basic_push(overwrite, stop_revision)
 
2359
            # TODO: Why set master_branch and local_branch if there's no
 
2360
            # binding?  Maybe cleaner to just leave them unset? -- mbp
 
2361
            # 20070504
 
2362
            result.master_branch = self.target
 
2363
            result.local_branch = None
 
2364
        _run_hooks()
 
2365
        return result
 
2366
 
2333
2367
    def _pull(self, overwrite=False, stop_revision=None,
2334
 
              possible_transports=None, _hook_master=None, run_hooks=True,
2335
 
              _override_hook_target=None, local=False,
2336
 
              merge_tags_to_master=True, tag_selector=None):
 
2368
             possible_transports=None, _hook_master=None, run_hooks=True,
 
2369
             _override_hook_target=None, local=False,
 
2370
             merge_tags_to_master=True):
2337
2371
        """See Branch.pull.
2338
2372
 
2339
2373
        This function is the core worker, used by GenericInterBranch.pull to
2359
2393
            result.target_branch = self.target
2360
2394
        else:
2361
2395
            result.target_branch = _override_hook_target
2362
 
        with self.source.lock_read():
 
2396
        self.source.lock_read()
 
2397
        try:
2363
2398
            # We assume that during 'pull' the target repository is closer than
2364
2399
            # the source one.
 
2400
            self.source.update_references(self.target)
2365
2401
            graph = self.target.repository.get_graph(self.source.repository)
2366
 
            # TODO: Branch formats should have a flag that indicates
 
2402
            # TODO: Branch formats should have a flag that indicates 
2367
2403
            # that revno's are expensive, and pull() should honor that flag.
2368
2404
            # -- JRV20090506
2369
2405
            result.old_revno, result.old_revid = \
2370
2406
                self.target.last_revision_info()
2371
2407
            overwrite = _fix_overwrite_type(overwrite)
2372
 
            self._update_revisions(
2373
 
                stop_revision, overwrite=("history" in overwrite), graph=graph)
2374
 
            # TODO: The old revid should be specified when merging tags,
2375
 
            # so a tags implementation that versions tags can only
 
2408
            self._update_revisions(stop_revision,
 
2409
                overwrite=("history" in overwrite),
 
2410
                graph=graph)
 
2411
            # TODO: The old revid should be specified when merging tags, 
 
2412
            # so a tags implementation that versions tags can only 
2376
2413
            # pull in the most recent changes. -- JRV20090506
2377
2414
            result.tag_updates, result.tag_conflicts = (
2378
 
                self.source.tags.merge_to(
2379
 
                    self.target.tags, "tags" in overwrite,
2380
 
                    ignore_master=not merge_tags_to_master,
2381
 
                    selector=tag_selector))
2382
 
            self.update_references()
2383
 
            result.new_revno, result.new_revid = (
2384
 
                self.target.last_revision_info())
 
2415
                self.source.tags.merge_to(self.target.tags,
 
2416
                    "tags" in overwrite,
 
2417
                    ignore_master=not merge_tags_to_master))
 
2418
            result.new_revno, result.new_revid = self.target.last_revision_info()
2385
2419
            if _hook_master:
2386
2420
                result.master_branch = _hook_master
2387
2421
                result.local_branch = result.target_branch
2391
2425
            if run_hooks:
2392
2426
                for hook in Branch.hooks['post_pull']:
2393
2427
                    hook(result)
2394
 
            return result
2395
 
 
2396
 
    def update_references(self):
2397
 
        if not getattr(self.source._format, 'supports_reference_locations', False):
2398
 
            return
2399
 
        reference_dict = self.source._get_all_reference_info()
2400
 
        if len(reference_dict) == 0:
2401
 
            return
2402
 
        old_base = self.source.base
2403
 
        new_base = self.target.base
2404
 
        target_reference_dict = self.target._get_all_reference_info()
2405
 
        for tree_path, (branch_location, file_id) in reference_dict.items():
2406
 
            try:
2407
 
                branch_location = urlutils.rebase_url(branch_location,
2408
 
                                                      old_base, new_base)
2409
 
            except urlutils.InvalidRebaseURLs:
2410
 
                # Fall back to absolute URL
2411
 
                branch_location = urlutils.join(old_base, branch_location)
2412
 
            target_reference_dict.setdefault(
2413
 
                tree_path, (branch_location, file_id))
2414
 
        self.target._set_all_reference_info(target_reference_dict)
 
2428
        finally:
 
2429
            self.source.unlock()
 
2430
        return result
2415
2431
 
2416
2432
 
2417
2433
InterBranch.register_optimiser(GenericInterBranch)
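
Plugins that ship their own branch implementations register optimisers the
same way; a minimal hedged sketch (the class and its compatibility test are
purely hypothetical):

from breezy.branch import InterBranch

class InterExampleBranch(InterBranch):
    """Hypothetical optimiser for a plugin-provided branch format."""

    @classmethod
    def is_compatible(klass, source, target):
        # Claim only the source/target pairs this optimiser can handle;
        # otherwise InterBranch.get() falls through to another registered
        # optimiser such as GenericInterBranch.
        return False

    @classmethod
    def _get_branch_formats_to_test(klass):
        return []

InterBranch.register_optimiser(InterExampleBranch)
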