/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to breezy/branch.py

  • Committer: Jelmer Vernooij
  • Date: 2017-07-23 22:06:41 UTC
  • mfrom: (6738 trunk)
  • mto: This revision was merged to the branch mainline in revision 6739.
  • Revision ID: jelmer@jelmer.uk-20170723220641-69eczax9bmv8d6kk
Merge trunk, address review comments.

Show diffs side-by-side

added added

removed removed

Lines of Context:
14
14
# along with this program; if not, write to the Free Software
15
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
16
 
 
17
from __future__ import absolute_import
 
18
 
 
19
from . import errors
 
20
 
17
21
from .lazy_import import lazy_import
18
22
lazy_import(globals(), """
19
 
import contextlib
20
23
import itertools
21
24
from breezy import (
 
25
    cleanup,
22
26
    config as _mod_config,
23
27
    debug,
24
 
    memorytree,
 
28
    fetch,
25
29
    repository,
26
30
    revision as _mod_revision,
27
31
    tag as _mod_tag,
30
34
    urlutils,
31
35
    )
32
36
from breezy.bzr import (
33
 
    fetch,
34
37
    remote,
35
38
    vf_search,
36
39
    )
39
42
 
40
43
from . import (
41
44
    controldir,
42
 
    errors,
43
45
    registry,
44
46
    )
 
47
from .decorators import (
 
48
    needs_read_lock,
 
49
    needs_write_lock,
 
50
    only_raises,
 
51
    )
45
52
from .hooks import Hooks
46
53
from .inter import InterObject
47
54
from .lock import LogicalLockResult
48
 
from .trace import mutter, mutter_callsite, note, is_quiet, warning
49
 
 
50
 
 
51
 
class UnstackableBranchFormat(errors.BzrError):
52
 
 
53
 
    _fmt = ("The branch '%(url)s'(%(format)s) is not a stackable format. "
54
 
            "You will need to upgrade the branch to permit branch stacking.")
55
 
 
56
 
    def __init__(self, format, url):
57
 
        errors.BzrError.__init__(self)
58
 
        self.format = format
59
 
        self.url = url
 
55
from .sixish import (
 
56
    BytesIO,
 
57
    viewitems,
 
58
    )
 
59
from .trace import mutter, mutter_callsite, note, is_quiet
60
60
 
61
61
 
62
62
class Branch(controldir.ControlComponent):
87
87
        self._revision_id_to_revno_cache = None
88
88
        self._partial_revision_id_to_revno_cache = {}
89
89
        self._partial_revision_history_cache = []
 
90
        self._tags_bytes = None
90
91
        self._last_revision_info_cache = None
91
92
        self._master_branch_cache = None
92
93
        self._merge_sorted_revisions_cache = None
149
150
        repository._iter_for_revno(
150
151
            self.repository, self._partial_revision_history_cache,
151
152
            stop_index=stop_index, stop_revision=stop_revision)
152
 
        if self._partial_revision_history_cache[-1] == \
153
 
                _mod_revision.NULL_REVISION:
 
153
        if self._partial_revision_history_cache[-1] == _mod_revision.NULL_REVISION:
154
154
            self._partial_revision_history_cache.pop()
155
155
 
156
156
    def _get_check_refs(self):
168
168
        For instance, if the branch is at URL/.bzr/branch,
169
169
        Branch.open(URL) -> a Branch instance.
170
170
        """
171
 
        control = controldir.ControlDir.open(
172
 
            base, possible_transports=possible_transports,
173
 
            _unsupported=_unsupported)
174
 
        return control.open_branch(
175
 
            unsupported=_unsupported,
 
171
        control = controldir.ControlDir.open(base,
 
172
            possible_transports=possible_transports, _unsupported=_unsupported)
 
173
        return control.open_branch(unsupported=_unsupported,
176
174
            possible_transports=possible_transports)
177
175
 
178
176
    @staticmethod
179
177
    def open_from_transport(transport, name=None, _unsupported=False,
180
 
                            possible_transports=None):
 
178
            possible_transports=None):
181
179
        """Open the branch rooted at transport"""
182
 
        control = controldir.ControlDir.open_from_transport(
183
 
            transport, _unsupported)
184
 
        return control.open_branch(
185
 
            name=name, unsupported=_unsupported,
 
180
        control = controldir.ControlDir.open_from_transport(transport, _unsupported)
 
181
        return control.open_branch(name=name, unsupported=_unsupported,
186
182
            possible_transports=possible_transports)
187
183
 
188
184
    @staticmethod
193
189
 
194
190
        Basically we keep looking up until we find the control directory or
195
191
        run into the root.  If there isn't one, raises NotBranchError.
196
 
        If there is one and it is either an unrecognised format or an
197
 
        unsupported format, UnknownFormatError or UnsupportedFormatError are
198
 
        raised.  If there is one, it is returned, along with the unused portion
199
 
        of url.
 
192
        If there is one and it is either an unrecognised format or an unsupported
 
193
        format, UnknownFormatError or UnsupportedFormatError are raised.
 
194
        If there is one, it is returned, along with the unused portion of url.
200
195
        """
201
 
        control, relpath = controldir.ControlDir.open_containing(
202
 
            url, possible_transports)
 
196
        control, relpath = controldir.ControlDir.open_containing(url,
 
197
                                                         possible_transports)
203
198
        branch = control.open_branch(possible_transports=possible_transports)
204
199
        return (branch, relpath)
205
200
 
231
226
        """
232
227
        return _mod_config.BranchStack(self)
233
228
 
 
229
    def _get_config(self):
 
230
        """Get the concrete config for just the config in this branch.
 
231
 
 
232
        This is not intended for client use; see Branch.get_config for the
 
233
        public API.
 
234
 
 
235
        Added in 1.14.
 
236
 
 
237
        :return: An object supporting get_option and set_option.
 
238
        """
 
239
        raise NotImplementedError(self._get_config)
 
240
 
234
241
    def store_uncommitted(self, creator):
235
242
        """Store uncommitted changes from a ShelfCreator.
236
243
 
254
261
        a_branch = Branch.open(url, possible_transports=possible_transports)
255
262
        return a_branch.repository
256
263
 
 
264
    @needs_read_lock
 
265
    def _get_tags_bytes(self):
 
266
        """Get the bytes of a serialised tags dict.
 
267
 
 
268
        Note that not all branches support tags, nor do all use the same tags
 
269
        logic: this method is specific to BasicTags. Other tag implementations
 
270
        may use the same method name and behave differently, safely, because
 
271
        of the double-dispatch via
 
272
        format.make_tags->tags_instance->get_tags_dict.
 
273
 
 
274
        :return: The bytes of the tags file.
 
275
        :seealso: Branch._set_tags_bytes.
 
276
        """
 
277
        if self._tags_bytes is None:
 
278
            self._tags_bytes = self._transport.get_bytes('tags')
 
279
        return self._tags_bytes
 
280
 
257
281
    def _get_nick(self, local=False, possible_transports=None):
258
282
        config = self.get_config()
259
283
        # explicit overrides master, but don't look for master if local is True
271
295
                # Silently fall back to local implicit nick if the master is
272
296
                # unavailable
273
297
                mutter("Could not connect to bound branch, "
274
 
                       "falling back to local nick.\n " + str(e))
 
298
                    "falling back to local nick.\n " + str(e))
275
299
        return config.get_nickname()
276
300
 
277
301
    def _set_nick(self, nick):
300
324
        new_history = []
301
325
        check_not_reserved_id = _mod_revision.check_not_reserved_id
302
326
        # Do not include ghosts or graph origin in revision_history
303
 
        while (current_rev_id in parents_map
304
 
               and len(parents_map[current_rev_id]) > 0):
 
327
        while (current_rev_id in parents_map and
 
328
               len(parents_map[current_rev_id]) > 0):
305
329
            check_not_reserved_id(current_rev_id)
306
330
            new_history.append(current_rev_id)
307
331
            current_rev_id = parents_map[current_rev_id][0]
335
359
    def get_physical_lock_status(self):
336
360
        raise NotImplementedError(self.get_physical_lock_status)
337
361
 
 
362
    @needs_read_lock
338
363
    def dotted_revno_to_revision_id(self, revno, _cache_reverse=False):
339
364
        """Return the revision_id for a dotted revno.
340
365
 
346
371
        :return: the revision_id
347
372
        :raises errors.NoSuchRevision: if the revno doesn't exist
348
373
        """
349
 
        with self.lock_read():
350
 
            rev_id = self._do_dotted_revno_to_revision_id(revno)
351
 
            if _cache_reverse:
352
 
                self._partial_revision_id_to_revno_cache[rev_id] = revno
353
 
            return rev_id
 
374
        rev_id = self._do_dotted_revno_to_revision_id(revno)
 
375
        if _cache_reverse:
 
376
            self._partial_revision_id_to_revno_cache[rev_id] = revno
 
377
        return rev_id
354
378
 
355
379
    def _do_dotted_revno_to_revision_id(self, revno):
356
380
        """Worker function for dotted_revno_to_revision_id.
359
383
        provide a more efficient implementation.
360
384
        """
361
385
        if len(revno) == 1:
362
 
            try:
363
 
                return self.get_rev_id(revno[0])
364
 
            except errors.RevisionNotPresent as e:
365
 
                raise errors.GhostRevisionsHaveNoRevno(revno[0], e.revision_id)
 
386
            return self.get_rev_id(revno[0])
366
387
        revision_id_to_revno = self.get_revision_id_to_revno_map()
367
388
        revision_ids = [revision_id for revision_id, this_revno
368
 
                        in revision_id_to_revno.items()
 
389
                        in viewitems(revision_id_to_revno)
369
390
                        if revno == this_revno]
370
391
        if len(revision_ids) == 1:
371
392
            return revision_ids[0]
373
394
            revno_str = '.'.join(map(str, revno))
374
395
            raise errors.NoSuchRevision(self, revno_str)
375
396
 
 
397
    @needs_read_lock
376
398
    def revision_id_to_dotted_revno(self, revision_id):
377
399
        """Given a revision id, return its dotted revno.
378
400
 
379
401
        :return: a tuple like (1,) or (400,1,3).
380
402
        """
381
 
        with self.lock_read():
382
 
            return self._do_revision_id_to_dotted_revno(revision_id)
 
403
        return self._do_revision_id_to_dotted_revno(revision_id)
383
404
 
384
405
    def _do_revision_id_to_dotted_revno(self, revision_id):
385
406
        """Worker function for revision_id_to_revno."""
402
423
                raise errors.NoSuchRevision(self, revision_id)
403
424
        return result
404
425
 
 
426
    @needs_read_lock
405
427
    def get_revision_id_to_revno_map(self):
406
428
        """Return the revision_id => dotted revno map.
407
429
 
410
432
        :return: A dictionary mapping revision_id => dotted revno.
411
433
            This dictionary should not be modified by the caller.
412
434
        """
413
 
        if 'evil' in debug.debug_flags:
414
 
            mutter_callsite(
415
 
                3, "get_revision_id_to_revno_map scales with ancestry.")
416
 
        with self.lock_read():
417
 
            if self._revision_id_to_revno_cache is not None:
418
 
                mapping = self._revision_id_to_revno_cache
419
 
            else:
420
 
                mapping = self._gen_revno_map()
421
 
                self._cache_revision_id_to_revno(mapping)
422
 
            # TODO: jam 20070417 Since this is being cached, should we be
423
 
            # returning a copy?
424
 
            # I would rather not, and instead just declare that users should
425
 
            # not modify the return value.
426
 
            return mapping
 
435
        if self._revision_id_to_revno_cache is not None:
 
436
            mapping = self._revision_id_to_revno_cache
 
437
        else:
 
438
            mapping = self._gen_revno_map()
 
439
            self._cache_revision_id_to_revno(mapping)
 
440
        # TODO: jam 20070417 Since this is being cached, should we be returning
 
441
        #       a copy?
 
442
        # I would rather not, and instead just declare that users should not
 
443
        # modify the return value.
 
444
        return mapping
427
445
 
428
446
    def _gen_revno_map(self):
429
447
        """Create a new mapping from revision ids to dotted revnos.
435
453
 
436
454
        :return: A dictionary mapping revision_id => dotted revno.
437
455
        """
438
 
        revision_id_to_revno = {
439
 
            rev_id: revno for rev_id, depth, revno, end_of_merge
440
 
            in self.iter_merge_sorted_revisions()}
 
456
        revision_id_to_revno = dict((rev_id, revno)
 
457
            for rev_id, depth, revno, end_of_merge
 
458
             in self.iter_merge_sorted_revisions())
441
459
        return revision_id_to_revno
442
460
 
 
461
    @needs_read_lock
443
462
    def iter_merge_sorted_revisions(self, start_revision_id=None,
444
 
                                    stop_revision_id=None,
445
 
                                    stop_rule='exclude', direction='reverse'):
 
463
            stop_revision_id=None, stop_rule='exclude', direction='reverse'):
446
464
        """Walk the revisions for a branch in merge sorted order.
447
465
 
448
466
        Merge sorted order is the output from a merge-aware,
460
478
            * 'include' - the stop revision is the last item in the result
461
479
            * 'with-merges' - include the stop revision and all of its
462
480
              merged revisions in the result
463
 
            * 'with-merges-without-common-ancestry' - filter out revisions
 
481
            * 'with-merges-without-common-ancestry' - filter out revisions 
464
482
              that are in both ancestries
465
483
        :param direction: either 'reverse' or 'forward':
466
484
 
485
503
            * end_of_merge: When True the next node (earlier in history) is
486
504
              part of a different merge.
487
505
        """
488
 
        with self.lock_read():
489
 
            # Note: depth and revno values are in the context of the branch so
490
 
            # we need the full graph to get stable numbers, regardless of the
491
 
            # start_revision_id.
492
 
            if self._merge_sorted_revisions_cache is None:
493
 
                last_revision = self.last_revision()
494
 
                known_graph = self.repository.get_known_graph_ancestry(
495
 
                    [last_revision])
496
 
                self._merge_sorted_revisions_cache = known_graph.merge_sort(
497
 
                    last_revision)
498
 
            filtered = self._filter_merge_sorted_revisions(
499
 
                self._merge_sorted_revisions_cache, start_revision_id,
500
 
                stop_revision_id, stop_rule)
501
 
            # Make sure we don't return revisions that are not part of the
502
 
            # start_revision_id ancestry.
503
 
            filtered = self._filter_start_non_ancestors(filtered)
504
 
            if direction == 'reverse':
505
 
                return filtered
506
 
            if direction == 'forward':
507
 
                return reversed(list(filtered))
508
 
            else:
509
 
                raise ValueError('invalid direction %r' % direction)
 
506
        # Note: depth and revno values are in the context of the branch so
 
507
        # we need the full graph to get stable numbers, regardless of the
 
508
        # start_revision_id.
 
509
        if self._merge_sorted_revisions_cache is None:
 
510
            last_revision = self.last_revision()
 
511
            known_graph = self.repository.get_known_graph_ancestry(
 
512
                [last_revision])
 
513
            self._merge_sorted_revisions_cache = known_graph.merge_sort(
 
514
                last_revision)
 
515
        filtered = self._filter_merge_sorted_revisions(
 
516
            self._merge_sorted_revisions_cache, start_revision_id,
 
517
            stop_revision_id, stop_rule)
 
518
        # Make sure we don't return revisions that are not part of the
 
519
        # start_revision_id ancestry.
 
520
        filtered = self._filter_start_non_ancestors(filtered)
 
521
        if direction == 'reverse':
 
522
            return filtered
 
523
        if direction == 'forward':
 
524
            return reversed(list(filtered))
 
525
        else:
 
526
            raise ValueError('invalid direction %r' % direction)
510
527
 
511
528
    def _filter_merge_sorted_revisions(self, merge_sorted_revisions,
512
 
                                       start_revision_id, stop_revision_id,
513
 
                                       stop_rule):
 
529
        start_revision_id, stop_revision_id, stop_rule):
514
530
        """Iterate over an inclusive range of sorted revisions."""
515
531
        rev_iter = iter(merge_sorted_revisions)
516
532
        if start_revision_id is not None:
571
587
                if rev_id == left_parent:
572
588
                    # reached the left parent after the stop_revision
573
589
                    return
574
 
                if (not reached_stop_revision_id
575
 
                        or rev_id in revision_id_whitelist):
 
590
                if (not reached_stop_revision_id or
 
591
                        rev_id in revision_id_whitelist):
576
592
                    yield (rev_id, node.merge_depth, node.revno,
577
 
                           node.end_of_merge)
 
593
                       node.end_of_merge)
578
594
                    if reached_stop_revision_id or rev_id == stop_revision_id:
579
595
                        # only do the merged revs of rev_id from now on
580
596
                        rev = self.repository.get_revision(rev_id)
590
606
        # ancestry. Given the order guaranteed by the merge sort, we will see
591
607
        # uninteresting descendants of the first parent of our tip before the
592
608
        # tip itself.
593
 
        try:
594
 
            first = next(rev_iter)
595
 
        except StopIteration:
596
 
            return
 
609
        first = next(rev_iter)
597
610
        (rev_id, merge_depth, revno, end_of_merge) = first
598
611
        yield first
599
612
        if not merge_depth:
636
649
        """Tell this branch object not to release the physical lock when this
637
650
        object is unlocked.
638
651
 
639
 
        If lock_write doesn't return a token, then this method is not
640
 
        supported.
 
652
        If lock_write doesn't return a token, then this method is not supported.
641
653
        """
642
654
        self.control_files.leave_in_place()
643
655
 
645
657
        """Tell this branch object to release the physical lock when this
646
658
        object is unlocked, even if it didn't originally acquire it.
647
659
 
648
 
        If lock_write doesn't return a token, then this method is not
649
 
        supported.
 
660
        If lock_write doesn't return a token, then this method is not supported.
650
661
        """
651
662
        self.control_files.dont_leave_in_place()
652
663
 
670
681
            raise errors.UpgradeRequired(self.user_url)
671
682
        self.get_config_stack().set('append_revisions_only', enabled)
672
683
 
673
 
    def fetch(self, from_branch, stop_revision=None, limit=None, lossy=False):
 
684
    def set_reference_info(self, file_id, tree_path, branch_location):
 
685
        """Set the branch location to use for a tree reference."""
 
686
        raise errors.UnsupportedOperation(self.set_reference_info, self)
 
687
 
 
688
    def get_reference_info(self, file_id):
 
689
        """Get the tree_path and branch_location for a tree reference."""
 
690
        raise errors.UnsupportedOperation(self.get_reference_info, self)
 
691
 
 
692
    @needs_write_lock
 
693
    def fetch(self, from_branch, last_revision=None, limit=None):
674
694
        """Copy revisions from from_branch into this branch.
675
695
 
676
696
        :param from_branch: Where to copy from.
677
 
        :param stop_revision: What revision to stop at (None for at the end
 
697
        :param last_revision: What revision to stop at (None for at the end
678
698
                              of the branch.
679
699
        :param limit: Optional rough limit of revisions to fetch
680
700
        :return: None
681
701
        """
682
 
        with self.lock_write():
683
 
            return InterBranch.get(from_branch, self).fetch(
684
 
                stop_revision, limit=limit, lossy=lossy)
 
702
        return InterBranch.get(from_branch, self).fetch(last_revision, limit=limit)
685
703
 
686
704
    def get_bound_location(self):
687
705
        """Return the URL of the branch we are bound to.
709
727
        :param revprops: Optional dictionary of revision properties.
710
728
        :param revision_id: Optional revision id.
711
729
        :param lossy: Whether to discard data that can not be natively
712
 
            represented, when pushing to a foreign VCS
 
730
            represented, when pushing to a foreign VCS 
713
731
        """
714
732
 
715
733
        if config_stack is None:
716
734
            config_stack = self.get_config_stack()
717
735
 
718
 
        return self.repository.get_commit_builder(
719
 
            self, parents, config_stack, timestamp, timezone, committer,
720
 
            revprops, revision_id, lossy)
 
736
        return self.repository.get_commit_builder(self, parents, config_stack,
 
737
            timestamp, timezone, committer, revprops, revision_id,
 
738
            lossy)
721
739
 
722
740
    def get_master_branch(self, possible_transports=None):
723
741
        """Return the branch we are bound to.
735
753
        """
736
754
        raise NotImplementedError(self.get_stacked_on_url)
737
755
 
 
756
    def print_file(self, file, revision_id):
 
757
        """Print `file` to stdout."""
 
758
        raise NotImplementedError(self.print_file)
 
759
 
 
760
    @needs_write_lock
738
761
    def set_last_revision_info(self, revno, revision_id):
739
762
        """Set the last revision of this branch.
740
763
 
748
771
        """
749
772
        raise NotImplementedError(self.set_last_revision_info)
750
773
 
 
774
    @needs_write_lock
751
775
    def generate_revision_history(self, revision_id, last_rev=None,
752
776
                                  other_branch=None):
753
777
        """See Branch.generate_revision_history"""
754
 
        with self.lock_write():
755
 
            graph = self.repository.get_graph()
756
 
            (last_revno, last_revid) = self.last_revision_info()
757
 
            known_revision_ids = [
758
 
                (last_revid, last_revno),
759
 
                (_mod_revision.NULL_REVISION, 0),
760
 
                ]
761
 
            if last_rev is not None:
762
 
                if not graph.is_ancestor(last_rev, revision_id):
763
 
                    # our previous tip is not merged into stop_revision
764
 
                    raise errors.DivergedBranches(self, other_branch)
765
 
            revno = graph.find_distance_to_null(
766
 
                revision_id, known_revision_ids)
767
 
            self.set_last_revision_info(revno, revision_id)
 
778
        graph = self.repository.get_graph()
 
779
        (last_revno, last_revid) = self.last_revision_info()
 
780
        known_revision_ids = [
 
781
            (last_revid, last_revno),
 
782
            (_mod_revision.NULL_REVISION, 0),
 
783
            ]
 
784
        if last_rev is not None:
 
785
            if not graph.is_ancestor(last_rev, revision_id):
 
786
                # our previous tip is not merged into stop_revision
 
787
                raise errors.DivergedBranches(self, other_branch)
 
788
        revno = graph.find_distance_to_null(revision_id, known_revision_ids)
 
789
        self.set_last_revision_info(revno, revision_id)
768
790
 
 
791
    @needs_write_lock
769
792
    def set_parent(self, url):
770
793
        """See Branch.set_parent."""
771
794
        # TODO: Maybe delete old location files?
773
796
        # FIXUP this and get_parent in a future branch format bump:
774
797
        # read and rewrite the file. RBC 20060125
775
798
        if url is not None:
776
 
            if isinstance(url, str):
 
799
            if isinstance(url, unicode):
777
800
                try:
778
 
                    url.encode('ascii')
 
801
                    url = url.encode('ascii')
779
802
                except UnicodeEncodeError:
780
 
                    raise urlutils.InvalidURL(
781
 
                        url, "Urls must be 7-bit ascii, "
 
803
                    raise urlutils.InvalidURL(url,
 
804
                        "Urls must be 7-bit ascii, "
782
805
                        "use breezy.urlutils.escape")
783
806
            url = urlutils.relative_url(self.base, url)
784
 
        with self.lock_write():
785
 
            self._set_parent_location(url)
 
807
        self._set_parent_location(url)
786
808
 
 
809
    @needs_write_lock
787
810
    def set_stacked_on_url(self, url):
788
811
        """Set the URL this branch is stacked against.
789
812
 
793
816
            stacking.
794
817
        """
795
818
        if not self._format.supports_stacking():
796
 
            raise UnstackableBranchFormat(self._format, self.user_url)
797
 
        with self.lock_write():
798
 
            # XXX: Changing from one fallback repository to another does not
799
 
            # check that all the data you need is present in the new fallback.
800
 
            # Possibly it should.
801
 
            self._check_stackable_repo()
802
 
            if not url:
803
 
                try:
804
 
                    self.get_stacked_on_url()
805
 
                except (errors.NotStacked, UnstackableBranchFormat,
806
 
                        errors.UnstackableRepositoryFormat):
807
 
                    return
808
 
                self._unstack()
809
 
            else:
810
 
                self._activate_fallback_location(
811
 
                    url, possible_transports=[self.controldir.root_transport])
812
 
            # write this out after the repository is stacked to avoid setting a
813
 
            # stacked config that doesn't work.
814
 
            self._set_config_location('stacked_on_location', url)
 
819
            raise errors.UnstackableBranchFormat(self._format, self.user_url)
 
820
        # XXX: Changing from one fallback repository to another does not check
 
821
        # that all the data you need is present in the new fallback.
 
822
        # Possibly it should.
 
823
        self._check_stackable_repo()
 
824
        if not url:
 
825
            try:
 
826
                old_url = self.get_stacked_on_url()
 
827
            except (errors.NotStacked, errors.UnstackableBranchFormat,
 
828
                errors.UnstackableRepositoryFormat):
 
829
                return
 
830
            self._unstack()
 
831
        else:
 
832
            self._activate_fallback_location(url,
 
833
                possible_transports=[self.controldir.root_transport])
 
834
        # write this out after the repository is stacked to avoid setting a
 
835
        # stacked config that doesn't work.
 
836
        self._set_config_location('stacked_on_location', url)
815
837
 
816
838
    def _unstack(self):
817
839
        """Change a branch to be unstacked, copying data as needed.
818
840
 
819
841
        Don't call this directly, use set_stacked_on_url(None).
820
842
        """
821
 
        with ui.ui_factory.nested_progress_bar() as pb:
 
843
        pb = ui.ui_factory.nested_progress_bar()
 
844
        try:
822
845
            pb.update(gettext("Unstacking"))
823
846
            # The basic approach here is to fetch the tip of the branch,
824
847
            # including all available ghosts, from the existing stacked
825
 
            # repository into a new repository object without the fallbacks.
 
848
            # repository into a new repository object without the fallbacks. 
826
849
            #
827
850
            # XXX: See <https://launchpad.net/bugs/397286> - this may not be
828
851
            # correct for CHKMap repostiories
829
852
            old_repository = self.repository
830
853
            if len(old_repository._fallback_repositories) != 1:
831
 
                raise AssertionError(
832
 
                    "can't cope with fallback repositories "
833
 
                    "of %r (fallbacks: %r)" % (
834
 
                        old_repository, old_repository._fallback_repositories))
 
854
                raise AssertionError("can't cope with fallback repositories "
 
855
                    "of %r (fallbacks: %r)" % (old_repository,
 
856
                        old_repository._fallback_repositories))
835
857
            # Open the new repository object.
836
858
            # Repositories don't offer an interface to remove fallback
837
859
            # repositories today; take the conceptually simpler option and just
844
866
                self.controldir.root_transport.base)
845
867
            new_repository = new_bzrdir.find_repository()
846
868
            if new_repository._fallback_repositories:
847
 
                raise AssertionError(
848
 
                    "didn't expect %r to have fallback_repositories"
 
869
                raise AssertionError("didn't expect %r to have "
 
870
                    "fallback_repositories"
849
871
                    % (self.repository,))
850
872
            # Replace self.repository with the new repository.
851
873
            # Do our best to transfer the lock state (i.e. lock-tokens and
878
900
            if old_lock_count == 0:
879
901
                raise AssertionError(
880
902
                    'old_repository should have been locked at least once.')
881
 
            for i in range(old_lock_count - 1):
 
903
            for i in range(old_lock_count-1):
882
904
                self.repository.lock_write()
883
905
            # Fetch from the old repository into the new.
884
 
            with old_repository.lock_read():
 
906
            old_repository.lock_read()
 
907
            try:
885
908
                # XXX: If you unstack a branch while it has a working tree
886
909
                # with a pending merge, the pending-merged revisions will no
887
910
                # longer be present.  You can (probably) revert and remerge.
889
912
                    tags_to_fetch = set(self.tags.get_reverse_tag_dict())
890
913
                except errors.TagsNotSupported:
891
914
                    tags_to_fetch = set()
892
 
                fetch_spec = vf_search.NotInOtherForRevs(
893
 
                    self.repository, old_repository,
894
 
                    required_ids=[self.last_revision()],
 
915
                fetch_spec = vf_search.NotInOtherForRevs(self.repository,
 
916
                    old_repository, required_ids=[self.last_revision()],
895
917
                    if_present_ids=tags_to_fetch, find_ghosts=True).execute()
896
918
                self.repository.fetch(old_repository, fetch_spec=fetch_spec)
 
919
            finally:
 
920
                old_repository.unlock()
 
921
        finally:
 
922
            pb.finished()
 
923
 
 
924
    def _set_tags_bytes(self, bytes):
 
925
        """Mirror method for _get_tags_bytes.
 
926
 
 
927
        :seealso: Branch._get_tags_bytes.
 
928
        """
 
929
        op = cleanup.OperationWithCleanups(self._set_tags_bytes_locked)
 
930
        op.add_cleanup(self.lock_write().unlock)
 
931
        return op.run_simple(bytes)
 
932
 
 
933
    def _set_tags_bytes_locked(self, bytes):
 
934
        self._tags_bytes = bytes
 
935
        return self._transport.put_bytes('tags', bytes)
897
936
 
898
937
    def _cache_revision_history(self, rev_history):
899
938
        """Set the cached revision history to rev_history.
930
969
        self._merge_sorted_revisions_cache = None
931
970
        self._partial_revision_history_cache = []
932
971
        self._partial_revision_id_to_revno_cache = {}
 
972
        self._tags_bytes = None
933
973
 
934
974
    def _gen_revision_history(self):
935
975
        """Return sequence of revision hashes on to this branch.
972
1012
        """Return last revision id, or NULL_REVISION."""
973
1013
        return self.last_revision_info()[1]
974
1014
 
 
1015
    @needs_read_lock
975
1016
    def last_revision_info(self):
976
1017
        """Return information about the last revision.
977
1018
 
978
1019
        :return: A tuple (revno, revision_id).
979
1020
        """
980
 
        with self.lock_read():
981
 
            if self._last_revision_info_cache is None:
982
 
                self._last_revision_info_cache = (
983
 
                    self._read_last_revision_info())
984
 
            return self._last_revision_info_cache
 
1021
        if self._last_revision_info_cache is None:
 
1022
            self._last_revision_info_cache = self._read_last_revision_info()
 
1023
        return self._last_revision_info_cache
985
1024
 
986
1025
    def _read_last_revision_info(self):
987
1026
        raise NotImplementedError(self._read_last_revision_info)
1017
1056
        except ValueError:
1018
1057
            raise errors.NoSuchRevision(self, revision_id)
1019
1058
 
 
1059
    @needs_read_lock
1020
1060
    def get_rev_id(self, revno, history=None):
1021
1061
        """Find the revision id of the specified revno."""
1022
 
        with self.lock_read():
1023
 
            if revno == 0:
1024
 
                return _mod_revision.NULL_REVISION
1025
 
            last_revno, last_revid = self.last_revision_info()
1026
 
            if revno == last_revno:
1027
 
                return last_revid
1028
 
            if revno <= 0 or revno > last_revno:
1029
 
                raise errors.NoSuchRevision(self, revno)
1030
 
            distance_from_last = last_revno - revno
1031
 
            if len(self._partial_revision_history_cache) <= distance_from_last:
1032
 
                self._extend_partial_history(distance_from_last)
1033
 
            return self._partial_revision_history_cache[distance_from_last]
 
1062
        if revno == 0:
 
1063
            return _mod_revision.NULL_REVISION
 
1064
        last_revno, last_revid = self.last_revision_info()
 
1065
        if revno == last_revno:
 
1066
            return last_revid
 
1067
        if revno <= 0 or revno > last_revno:
 
1068
            raise errors.NoSuchRevision(self, revno)
 
1069
        distance_from_last = last_revno - revno
 
1070
        if len(self._partial_revision_history_cache) <= distance_from_last:
 
1071
            self._extend_partial_history(distance_from_last)
 
1072
        return self._partial_revision_history_cache[distance_from_last]
1034
1073
 
1035
1074
    def pull(self, source, overwrite=False, stop_revision=None,
1036
1075
             possible_transports=None, *args, **kwargs):
1040
1079
 
1041
1080
        :returns: PullResult instance
1042
1081
        """
1043
 
        return InterBranch.get(source, self).pull(
1044
 
            overwrite=overwrite, stop_revision=stop_revision,
 
1082
        return InterBranch.get(source, self).pull(overwrite=overwrite,
 
1083
            stop_revision=stop_revision,
1045
1084
            possible_transports=possible_transports, *args, **kwargs)
1046
1085
 
1047
1086
    def push(self, target, overwrite=False, stop_revision=None, lossy=False,
1048
 
             *args, **kwargs):
 
1087
            *args, **kwargs):
1049
1088
        """Mirror this branch into target.
1050
1089
 
1051
1090
        This branch is considered to be 'local', having low latency.
1052
1091
        """
1053
 
        return InterBranch.get(self, target).push(
1054
 
            overwrite, stop_revision, lossy, *args, **kwargs)
 
1092
        return InterBranch.get(self, target).push(overwrite, stop_revision,
 
1093
            lossy, *args, **kwargs)
1055
1094
 
1056
1095
    def basis_tree(self):
1057
1096
        """Return `Tree` object for last revision."""
1070
1109
        # This is an old-format absolute path to a local branch
1071
1110
        # turn it into a url
1072
1111
        if parent.startswith('/'):
1073
 
            parent = urlutils.local_path_to_url(parent)
 
1112
            parent = urlutils.local_path_to_url(parent.decode('utf8'))
1074
1113
        try:
1075
1114
            return urlutils.join(self.base[:-1], parent)
1076
 
        except urlutils.InvalidURLJoin:
 
1115
        except urlutils.InvalidURLJoin as e:
1077
1116
            raise errors.InaccessibleParent(parent, self.user_url)
1078
1117
 
1079
1118
    def _get_parent_location(self):
1165
1204
        for hook in hooks:
1166
1205
            hook(params)
1167
1206
 
 
1207
    @needs_write_lock
1168
1208
    def update(self):
1169
1209
        """Synchronise this branch with the master branch if any.
1170
1210
 
1188
1228
        if revno < 1 or revno > self.revno():
1189
1229
            raise errors.InvalidRevisionNumber(revno)
1190
1230
 
1191
 
    def clone(self, to_controldir, revision_id=None, name=None,
1192
 
              repository_policy=None, tag_selector=None):
 
1231
    @needs_read_lock
 
1232
    def clone(self, to_controldir, revision_id=None, repository_policy=None):
1193
1233
        """Clone this branch into to_controldir preserving all semantic values.
1194
1234
 
1195
1235
        Most API users will want 'create_clone_on_transport', which creates a
1198
1238
        revision_id: if not None, the revision history in the new branch will
1199
1239
                     be truncated to end with revision_id.
1200
1240
        """
1201
 
        result = to_controldir.create_branch(name=name)
1202
 
        with self.lock_read(), result.lock_write():
 
1241
        result = to_controldir.create_branch()
 
1242
        result.lock_write()
 
1243
        try:
1203
1244
            if repository_policy is not None:
1204
1245
                repository_policy.configure_branch(result)
1205
 
            self.copy_content_into(
1206
 
                result, revision_id=revision_id, tag_selector=tag_selector)
 
1246
            self.copy_content_into(result, revision_id=revision_id)
 
1247
        finally:
 
1248
            result.unlock()
1207
1249
        return result
1208
1250
 
 
1251
    @needs_read_lock
1209
1252
    def sprout(self, to_controldir, revision_id=None, repository_policy=None,
1210
 
               repository=None, lossy=False, tag_selector=None):
 
1253
            repository=None):
1211
1254
        """Create a new line of development from the branch, into to_controldir.
1212
1255
 
1213
1256
        to_controldir controls the branch format.
1215
1258
        revision_id: if not None, the revision history in the new branch will
1216
1259
                     be truncated to end with revision_id.
1217
1260
        """
1218
 
        if (repository_policy is not None
1219
 
                and repository_policy.requires_stacking()):
 
1261
        if (repository_policy is not None and
 
1262
            repository_policy.requires_stacking()):
1220
1263
            to_controldir._format.require_stacking(_skip_repo=True)
1221
1264
        result = to_controldir.create_branch(repository=repository)
1222
 
        if lossy:
1223
 
            raise errors.LossyPushToSameVCS(self, result)
1224
 
        with self.lock_read(), result.lock_write():
 
1265
        result.lock_write()
 
1266
        try:
1225
1267
            if repository_policy is not None:
1226
1268
                repository_policy.configure_branch(result)
1227
 
            self.copy_content_into(
1228
 
                result, revision_id=revision_id, tag_selector=tag_selector)
 
1269
            self.copy_content_into(result, revision_id=revision_id)
1229
1270
            master_url = self.get_bound_location()
1230
1271
            if master_url is None:
1231
 
                result.set_parent(self.user_url)
 
1272
                result.set_parent(self.controldir.root_transport.base)
1232
1273
            else:
1233
1274
                result.set_parent(master_url)
 
1275
        finally:
 
1276
            result.unlock()
1234
1277
        return result
1235
1278
 
1236
1279
    def _synchronize_history(self, destination, revision_id):
1251
1294
        else:
1252
1295
            graph = self.repository.get_graph()
1253
1296
            try:
1254
 
                revno = graph.find_distance_to_null(
1255
 
                    revision_id, [(source_revision_id, source_revno)])
 
1297
                revno = graph.find_distance_to_null(revision_id, 
 
1298
                    [(source_revision_id, source_revno)])
1256
1299
            except errors.GhostRevisionsHaveNoRevno:
1257
1300
                # Default to 1, if we can't find anything else
1258
1301
                revno = 1
1259
1302
        destination.set_last_revision_info(revno, revision_id)
1260
1303
 
1261
 
    def copy_content_into(self, destination, revision_id=None, tag_selector=None):
 
1304
    def copy_content_into(self, destination, revision_id=None):
1262
1305
        """Copy the content of self into destination.
1263
1306
 
1264
1307
        revision_id: if not None, the revision history in the new branch will
1265
1308
                     be truncated to end with revision_id.
1266
 
        tag_selector: Optional callback that receives a tag name
1267
 
            and should return a boolean to indicate whether a tag should be copied
1268
1309
        """
1269
1310
        return InterBranch.get(self, destination).copy_content_into(
1270
 
            revision_id=revision_id, tag_selector=tag_selector)
 
1311
            revision_id=revision_id)
1271
1312
 
1272
1313
    def update_references(self, target):
1273
 
        if not self._format.supports_reference_locations:
1274
 
            return
1275
 
        return InterBranch.get(self, target).update_references()
 
1314
        if not getattr(self._format, 'supports_reference_locations', False):
 
1315
            return
 
1316
        reference_dict = self._get_all_reference_info()
 
1317
        if len(reference_dict) == 0:
 
1318
            return
 
1319
        old_base = self.base
 
1320
        new_base = target.base
 
1321
        target_reference_dict = target._get_all_reference_info()
 
1322
        for file_id, (tree_path, branch_location) in viewitems(reference_dict):
 
1323
            branch_location = urlutils.rebase_url(branch_location,
 
1324
                                                  old_base, new_base)
 
1325
            target_reference_dict.setdefault(
 
1326
                file_id, (tree_path, branch_location))
 
1327
        target._set_all_reference_info(target_reference_dict)
1276
1328
 
 
1329
    @needs_read_lock
1277
1330
    def check(self, refs):
1278
1331
        """Check consistency of the branch.
1279
1332
 
1287
1340
            branch._get_check_refs()
1288
1341
        :return: A BranchCheckResult.
1289
1342
        """
1290
 
        with self.lock_read():
1291
 
            result = BranchCheckResult(self)
1292
 
            last_revno, last_revision_id = self.last_revision_info()
1293
 
            actual_revno = refs[('lefthand-distance', last_revision_id)]
1294
 
            if actual_revno != last_revno:
1295
 
                result.errors.append(errors.BzrCheckError(
1296
 
                    'revno does not match len(mainline) %s != %s' % (
1297
 
                        last_revno, actual_revno)))
1298
 
            # TODO: We should probably also check that self.revision_history
1299
 
            # matches the repository for older branch formats.
1300
 
            # If looking for the code that cross-checks repository parents
1301
 
            # against the Graph.iter_lefthand_ancestry output, that is now a
1302
 
            # repository specific check.
1303
 
            return result
 
1343
        result = BranchCheckResult(self)
 
1344
        last_revno, last_revision_id = self.last_revision_info()
 
1345
        actual_revno = refs[('lefthand-distance', last_revision_id)]
 
1346
        if actual_revno != last_revno:
 
1347
            result.errors.append(errors.BzrCheckError(
 
1348
                'revno does not match len(mainline) %s != %s' % (
 
1349
                last_revno, actual_revno)))
 
1350
        # TODO: We should probably also check that self.revision_history
 
1351
        # matches the repository for older branch formats.
 
1352
        # If looking for the code that cross-checks repository parents against
 
1353
        # the Graph.iter_lefthand_ancestry output, that is now a repository
 
1354
        # specific check.
 
1355
        return result
1304
1356
 
1305
1357
    def _get_checkout_format(self, lightweight=False):
1306
1358
        """Return the most suitable metadir for a checkout of this branch.
1311
1363
        return format
1312
1364
 
1313
1365
    def create_clone_on_transport(self, to_transport, revision_id=None,
1314
 
                                  stacked_on=None, create_prefix=False,
1315
 
                                  use_existing_dir=False, no_tree=None,
1316
 
                                  tag_selector=None):
 
1366
        stacked_on=None, create_prefix=False, use_existing_dir=False,
 
1367
        no_tree=None):
1317
1368
        """Create a clone of this branch and its bzrdir.
1318
1369
 
1319
1370
        :param to_transport: The transport to clone onto.
1326
1377
        """
1327
1378
        # XXX: Fix the bzrdir API to allow getting the branch back from the
1328
1379
        # clone call. Or something. 20090224 RBC/spiv.
1329
 
        # XXX: Should this perhaps clone colocated branches as well,
 
1380
        # XXX: Should this perhaps clone colocated branches as well, 
1330
1381
        # rather than just the default branch? 20100319 JRV
1331
1382
        if revision_id is None:
1332
1383
            revision_id = self.last_revision()
1333
 
        dir_to = self.controldir.clone_on_transport(
1334
 
            to_transport, revision_id=revision_id, stacked_on=stacked_on,
 
1384
        dir_to = self.controldir.clone_on_transport(to_transport,
 
1385
            revision_id=revision_id, stacked_on=stacked_on,
1335
1386
            create_prefix=create_prefix, use_existing_dir=use_existing_dir,
1336
 
            no_tree=no_tree, tag_selector=tag_selector)
 
1387
            no_tree=no_tree)
1337
1388
        return dir_to.open_branch()
1338
1389
 
1339
1390
    def create_checkout(self, to_location, revision_id=None,
1340
1391
                        lightweight=False, accelerator_tree=None,
1341
 
                        hardlink=False, recurse_nested=True):
 
1392
                        hardlink=False):
1342
1393
        """Create a checkout of a branch.
1343
1394
 
1344
1395
        :param to_location: The url to produce the checkout at
1351
1402
            content is different.
1352
1403
        :param hardlink: If true, hard-link files from accelerator_tree,
1353
1404
            where possible.
1354
 
        :param recurse_nested: Whether to recurse into nested trees
1355
1405
        :return: The tree of the created checkout
1356
1406
        """
1357
1407
        t = transport.get_transport(to_location)
1369
1419
                pass
1370
1420
            else:
1371
1421
                raise errors.AlreadyControlDirError(t.base)
1372
 
            if (checkout.control_transport.base
1373
 
                    == self.controldir.control_transport.base):
 
1422
            if checkout.control_transport.base == self.controldir.control_transport.base:
1374
1423
                # When checking out to the same control directory,
1375
1424
                # always create a lightweight checkout
1376
1425
                lightweight = True
1379
1428
            from_branch = checkout.set_branch_reference(target_branch=self)
1380
1429
        else:
1381
1430
            policy = checkout.determine_repository_policy()
1382
 
            policy.acquire_repository()
 
1431
            repo = policy.acquire_repository()[0]
1383
1432
            checkout_branch = checkout.create_branch()
1384
1433
            checkout_branch.bind(self)
1385
1434
            # pull up to the specified revision_id to set the initial
1391
1440
                                           accelerator_tree=accelerator_tree,
1392
1441
                                           hardlink=hardlink)
1393
1442
        basis_tree = tree.basis_tree()
1394
 
        with basis_tree.lock_read():
1395
 
            for path in basis_tree.iter_references():
1396
 
                reference_parent = tree.reference_parent(path)
1397
 
                if reference_parent is None:
1398
 
                    warning('Branch location for %s unknown.', path)
1399
 
                    continue
1400
 
                reference_parent.create_checkout(
1401
 
                    tree.abspath(path),
1402
 
                    basis_tree.get_reference_revision(path), lightweight)
 
1443
        basis_tree.lock_read()
 
1444
        try:
 
1445
            for path, file_id in basis_tree.iter_references():
 
1446
                reference_parent = self.reference_parent(file_id, path)
 
1447
                reference_parent.create_checkout(tree.abspath(path),
 
1448
                    basis_tree.get_reference_revision(file_id, path),
 
1449
                    lightweight)
 
1450
        finally:
 
1451
            basis_tree.unlock()
1403
1452
        return tree
1404
1453
 
 
1454
    @needs_write_lock
1405
1455
    def reconcile(self, thorough=True):
1406
 
        """Make sure the data stored in this branch is consistent.
1407
 
 
1408
 
        :return: A `ReconcileResult` object.
 
1456
        """Make sure the data stored in this branch is consistent."""
 
1457
        from breezy.reconcile import BranchReconciler
 
1458
        reconciler = BranchReconciler(self, thorough=thorough)
 
1459
        reconciler.reconcile()
 
1460
        return reconciler
 
1461
 
 
1462
    def reference_parent(self, file_id, path, possible_transports=None):
 
1463
        """Return the parent branch for a tree-reference file_id
 
1464
 
 
1465
        :param file_id: The file_id of the tree reference
 
1466
        :param path: The path of the file_id in the tree
 
1467
        :return: A branch associated with the file_id
1409
1468
        """
1410
 
        raise NotImplementedError(self.reconcile)
 
1469
        # FIXME should provide multiple branches, based on config
 
1470
        return Branch.open(self.controldir.root_transport.clone(path).base,
 
1471
                           possible_transports=possible_transports)
1411
1472
 
1412
1473
    def supports_tags(self):
1413
1474
        return self._format.supports_tags()
1481
1542
        if_present_fetch.discard(_mod_revision.NULL_REVISION)
1482
1543
        return must_fetch, if_present_fetch
1483
1544
 
1484
 
    def create_memorytree(self):
1485
 
        """Create a memory tree for this branch.
1486
 
 
1487
 
        :return: An in-memory MutableTree instance
1488
 
        """
1489
 
        return memorytree.MemoryTree.create_on_branch(self)
1490
 
 
1491
1545
 
1492
1546
class BranchFormat(controldir.ControlComponentFormat):
1493
1547
    """An encapsulation of the initialization and open routines for a format.
1594
1648
        raise NotImplementedError(self.network_name)
1595
1649
 
1596
1650
    def open(self, controldir, name=None, _found=False, ignore_fallbacks=False,
1597
 
             found_repository=None, possible_transports=None):
 
1651
            found_repository=None, possible_transports=None):
1598
1652
        """Return the branch object for controldir.
1599
1653
 
1600
1654
        :param controldir: A ControlDir that contains a branch.
1616
1670
 
1617
1671
    def supports_leaving_lock(self):
1618
1672
        """True if this format supports leaving locks in place."""
1619
 
        return False  # by default
 
1673
        return False # by default
1620
1674
 
1621
1675
    def __str__(self):
1622
1676
        return self.get_format_description().rstrip()
1633
1687
        """True if tags can reference ghost revisions."""
1634
1688
        return True
1635
1689
 
1636
 
    def supports_store_uncommitted(self):
1637
 
        """True if uncommitted changes can be stored in this branch."""
1638
 
        return True
1639
 
 
1640
 
    def stores_revno(self):
1641
 
        """True if this branch format store revision numbers."""
1642
 
        return True
1643
 
 
1644
1690
 
1645
1691
class BranchHooks(Hooks):
1646
1692
    """A dictionary mapping hook name to a list of callables for branch hooks.
1656
1702
        notified.
1657
1703
        """
1658
1704
        Hooks.__init__(self, "breezy.branch", "Branch.hooks")
1659
 
        self.add_hook(
1660
 
            'open',
 
1705
        self.add_hook('open',
1661
1706
            "Called with the Branch object that has been opened after a "
1662
1707
            "branch is opened.", (1, 8))
1663
 
        self.add_hook(
1664
 
            'post_push',
 
1708
        self.add_hook('post_push',
1665
1709
            "Called after a push operation completes. post_push is called "
1666
 
            "with a breezy.branch.BranchPushResult object and only runs in "
1667
 
            "the bzr client.", (0, 15))
1668
 
        self.add_hook(
1669
 
            'post_pull',
 
1710
            "with a breezy.branch.BranchPushResult object and only runs in the "
 
1711
            "bzr client.", (0, 15))
 
1712
        self.add_hook('post_pull',
1670
1713
            "Called after a pull operation completes. post_pull is called "
1671
1714
            "with a breezy.branch.PullResult object and only runs in the "
1672
1715
            "bzr client.", (0, 15))
1673
 
        self.add_hook(
1674
 
            'pre_commit',
 
1716
        self.add_hook('pre_commit',
1675
1717
            "Called after a commit is calculated but before it is "
1676
1718
            "completed. pre_commit is called with (local, master, old_revno, "
1677
1719
            "old_revid, future_revno, future_revid, tree_delta, future_tree"
1680
1722
            "basis revision. hooks MUST NOT modify this delta. "
1681
1723
            " future_tree is an in-memory tree obtained from "
1682
1724
            "CommitBuilder.revision_tree() and hooks MUST NOT modify this "
1683
 
            "tree.", (0, 91))
1684
 
        self.add_hook(
1685
 
            'post_commit',
 
1725
            "tree.", (0,91))
 
1726
        self.add_hook('post_commit',
1686
1727
            "Called in the bzr client after a commit has completed. "
1687
1728
            "post_commit is called with (local, master, old_revno, old_revid, "
1688
1729
            "new_revno, new_revid). old_revid is NULL_REVISION for the first "
1689
1730
            "commit to a branch.", (0, 15))
1690
 
        self.add_hook(
1691
 
            'post_uncommit',
 
1731
        self.add_hook('post_uncommit',
1692
1732
            "Called in the bzr client after an uncommit completes. "
1693
1733
            "post_uncommit is called with (local, master, old_revno, "
1694
1734
            "old_revid, new_revno, new_revid) where local is the local branch "
1695
1735
            "or None, master is the target branch, and an empty branch "
1696
1736
            "receives new_revno of 0, new_revid of None.", (0, 15))
1697
 
        self.add_hook(
1698
 
            'pre_change_branch_tip',
 
1737
        self.add_hook('pre_change_branch_tip',
1699
1738
            "Called in bzr client and server before a change to the tip of a "
1700
1739
            "branch is made. pre_change_branch_tip is called with a "
1701
1740
            "breezy.branch.ChangeBranchTipParams. Note that push, pull, "
1702
1741
            "commit, uncommit will all trigger this hook.", (1, 6))
1703
 
        self.add_hook(
1704
 
            'post_change_branch_tip',
 
1742
        self.add_hook('post_change_branch_tip',
1705
1743
            "Called in bzr client and server after a change to the tip of a "
1706
1744
            "branch is made. post_change_branch_tip is called with a "
1707
1745
            "breezy.branch.ChangeBranchTipParams. Note that push, pull, "
1708
1746
            "commit, uncommit will all trigger this hook.", (1, 4))
1709
 
        self.add_hook(
1710
 
            'transform_fallback_location',
 
1747
        self.add_hook('transform_fallback_location',
1711
1748
            "Called when a stacked branch is activating its fallback "
1712
1749
            "locations. transform_fallback_location is called with (branch, "
1713
1750
            "url), and should return a new url. Returning the same url "
1719
1756
            "multiple hooks installed for transform_fallback_location, "
1720
1757
            "all are called with the url returned from the previous hook."
1721
1758
            "The order is however undefined.", (1, 9))
1722
 
        self.add_hook(
1723
 
            'automatic_tag_name',
 
1759
        self.add_hook('automatic_tag_name',
1724
1760
            "Called to determine an automatic tag name for a revision. "
1725
1761
            "automatic_tag_name is called with (branch, revision_id) and "
1726
1762
            "should return a tag name or None if no tag name could be "
1727
1763
            "determined. The first non-None tag name returned will be used.",
1728
1764
            (2, 2))
1729
 
        self.add_hook(
1730
 
            'post_branch_init',
 
1765
        self.add_hook('post_branch_init',
1731
1766
            "Called after new branch initialization completes. "
1732
1767
            "post_branch_init is called with a "
1733
1768
            "breezy.branch.BranchInitHookParams. "
1734
1769
            "Note that init, branch and checkout (both heavyweight and "
1735
1770
            "lightweight) will all trigger this hook.", (2, 2))
1736
 
        self.add_hook(
1737
 
            'post_switch',
 
1771
        self.add_hook('post_switch',
1738
1772
            "Called after a checkout switches branch. "
1739
1773
            "post_switch is called with a "
1740
1774
            "breezy.branch.SwitchHookParams.", (2, 2))
1741
1775
 
1742
1776
 
 
1777
 
1743
1778
# install the default hooks into the Branch class.
1744
1779
Branch.hooks = BranchHooks()
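
The hooks registered above are looked up by name at runtime. As a rough illustration (not part of this diff), a plugin would subscribe to them through the standard Hooks.install_named_hook entry point; the callback functions below are hypothetical:

from breezy.branch import Branch

def report_push(push_result):
    # post_push receives a BranchPushResult and only runs in the bzr client.
    print("pushed %s -> %s" % (push_result.old_revid, push_result.new_revid))

def pick_tag_name(branch, revision_id):
    # automatic_tag_name must return a tag name or None; the first
    # non-None result wins.
    return None

Branch.hooks.install_named_hook('post_push', report_push, 'example plugin')
Branch.hooks.install_named_hook('automatic_tag_name', pick_tag_name,
                                'example plugin')
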
1745
1780
 
1850
1885
        return self.__dict__ == other.__dict__
1851
1886
 
1852
1887
    def __repr__(self):
1853
 
        return "<%s for %s to (%s, %s)>" % (
1854
 
            self.__class__.__name__, self.control_dir, self.to_branch,
 
1888
        return "<%s for %s to (%s, %s)>" % (self.__class__.__name__,
 
1889
            self.control_dir, self.to_branch,
1855
1890
            self.revision_id)
1856
1891
 
1857
1892
 
1865
1900
 
1866
1901
    def get_default(self):
1867
1902
        """Return the current default format."""
1868
 
        if (self._default_format_key is not None
1869
 
                and self._default_format is None):
 
1903
        if (self._default_format_key is not None and
 
1904
            self._default_format is None):
1870
1905
            self._default_format = self.get(self._default_format_key)
1871
1906
        return self._default_format
1872
1907
 
1895
1930
# formats which have no format string are not discoverable
1896
1931
# and not independently creatable, so are not registered.
1897
1932
format_registry.register_lazy(
1898
 
    b"Bazaar-NG branch format 5\n", "breezy.bzr.fullhistory",
 
1933
    "Bazaar-NG branch format 5\n", "breezy.bzr.fullhistory",
1899
1934
    "BzrBranchFormat5")
1900
1935
format_registry.register_lazy(
1901
 
    b"Bazaar Branch Format 6 (bzr 0.15)\n",
 
1936
    "Bazaar Branch Format 6 (bzr 0.15)\n",
1902
1937
    "breezy.bzr.branch", "BzrBranchFormat6")
1903
1938
format_registry.register_lazy(
1904
 
    b"Bazaar Branch Format 7 (needs bzr 1.6)\n",
 
1939
    "Bazaar Branch Format 7 (needs bzr 1.6)\n",
1905
1940
    "breezy.bzr.branch", "BzrBranchFormat7")
1906
1941
format_registry.register_lazy(
1907
 
    b"Bazaar Branch Format 8 (needs bzr 1.15)\n",
 
1942
    "Bazaar Branch Format 8 (needs bzr 1.15)\n",
1908
1943
    "breezy.bzr.branch", "BzrBranchFormat8")
1909
1944
format_registry.register_lazy(
1910
 
    b"Bazaar-NG Branch Reference Format 1\n",
 
1945
    "Bazaar-NG Branch Reference Format 1\n",
1911
1946
    "breezy.bzr.branch", "BranchReferenceFormat")
1912
1947
 
1913
 
format_registry.set_default_key(b"Bazaar Branch Format 7 (needs bzr 1.6)\n")
 
1948
format_registry.set_default_key("Bazaar Branch Format 7 (needs bzr 1.6)\n")
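
The registrations above key each branch format class on the marker stored in the branch's format file and load the implementing module lazily. As a sketch only, a plugin could register its own format the same way; the key, module and class names here are hypothetical:

from breezy.branch import format_registry

format_registry.register_lazy(
    "Example Plugin Branch Format 1\n",
    "exampleplugin.branch", "ExamplePluginBranchFormat")

# The default format object itself is only instantiated on first use:
default_format = format_registry.get_default()
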
1914
1949
 
1915
1950
 
1916
1951
class BranchWriteLockResult(LogicalLockResult):
1917
1952
    """The result of write locking a branch.
1918
1953
 
1919
 
    :ivar token: The token obtained from the underlying branch lock, or
 
1954
    :ivar branch_token: The token obtained from the underlying branch lock, or
1920
1955
        None.
1921
1956
    :ivar unlock: A callable which will unlock the lock.
1922
1957
    """
1923
1958
 
 
1959
    def __init__(self, unlock, branch_token):
 
1960
        LogicalLockResult.__init__(self, unlock)
 
1961
        self.branch_token = branch_token
 
1962
 
1924
1963
    def __repr__(self):
1925
 
        return "BranchWriteLockResult(%r, %r)" % (self.unlock, self.token)
 
1964
        return "BranchWriteLockResult(%s, %s)" % (self.branch_token,
 
1965
            self.unlock)
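
In use, this is the object returned by Branch.lock_write(). A minimal sketch, assuming `branch` is an already-open Branch:

result = branch.lock_write()
try:
    token = result.branch_token  # None for formats that do not support tokens
    # ... modify the branch while holding the write lock ...
finally:
    result.unlock()
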
1926
1966
 
1927
1967
 
1928
1968
######################################################################
1999
2039
        tag_updates = getattr(self, "tag_updates", None)
2000
2040
        if not is_quiet():
2001
2041
            if self.old_revid != self.new_revid:
2002
 
                if self.new_revno is not None:
2003
 
                    note(gettext('Pushed up to revision %d.'),
2004
 
                         self.new_revno)
2005
 
                else:
2006
 
                    note(gettext('Pushed up to revision id %s.'),
2007
 
                         self.new_revid.decode('utf-8'))
 
2042
                note(gettext('Pushed up to revision %d.') % self.new_revno)
2008
2043
            if tag_updates:
2009
 
                note(ngettext('%d tag updated.', '%d tags updated.',
2010
 
                              len(tag_updates)) % len(tag_updates))
 
2044
                note(ngettext('%d tag updated.', '%d tags updated.', len(tag_updates)) % len(tag_updates))
2011
2045
            if self.old_revid == self.new_revid and not tag_updates:
2012
2046
                if not tag_conflicts:
2013
2047
                    note(gettext('No new revisions or tags to push.'))
2033
2067
            if any.
2034
2068
        """
2035
2069
        note(gettext('checked branch {0} format {1}').format(
2036
 
            self.branch.user_url, self.branch._format))
 
2070
                                self.branch.user_url, self.branch._format))
2037
2071
        for error in self.errors:
2038
2072
            note(gettext('found error:%s'), error)
2039
2073
 
2052
2086
    @classmethod
2053
2087
    def _get_branch_formats_to_test(klass):
2054
2088
        """Return an iterable of format tuples for testing.
2055
 
 
 
2089
        
2056
2090
        :return: An iterable of (from_format, to_format) to use when testing
2057
2091
            this InterBranch class. Each InterBranch class should define this
2058
2092
            method itself.
2059
2093
        """
2060
2094
        raise NotImplementedError(klass._get_branch_formats_to_test)
2061
2095
 
 
2096
    @needs_write_lock
2062
2097
    def pull(self, overwrite=False, stop_revision=None,
2063
 
             possible_transports=None, local=False, tag_selector=None):
 
2098
             possible_transports=None, local=False):
2064
2099
        """Mirror source into target branch.
2065
2100
 
2066
2101
        The target branch is considered to be 'local', having low latency.
2069
2104
        """
2070
2105
        raise NotImplementedError(self.pull)
2071
2106
 
 
2107
    @needs_write_lock
2072
2108
    def push(self, overwrite=False, stop_revision=None, lossy=False,
2073
 
             _override_hook_source_branch=None, tag_selector=None):
 
2109
             _override_hook_source_branch=None):
2074
2110
        """Mirror the source branch into the target branch.
2075
2111
 
2076
2112
        The source branch is considered to be 'local', having low latency.
2077
2113
        """
2078
2114
        raise NotImplementedError(self.push)
2079
2115
 
2080
 
    def copy_content_into(self, revision_id=None, tag_selector=None):
 
2116
    @needs_write_lock
 
2117
    def copy_content_into(self, revision_id=None):
2081
2118
        """Copy the content of source into target
2082
2119
 
2083
 
        :param revision_id:
2084
 
            if not None, the revision history in the new branch will
2085
 
            be truncated to end with revision_id.
2086
 
        :param tag_selector: Optional callback that can decide
2087
 
            to copy or not copy tags.
 
2120
        revision_id: if not None, the revision history in the new branch will
 
2121
                     be truncated to end with revision_id.
2088
2122
        """
2089
2123
        raise NotImplementedError(self.copy_content_into)
2090
2124
 
2091
 
    def fetch(self, stop_revision=None, limit=None, lossy=False):
 
2125
    @needs_write_lock
 
2126
    def fetch(self, stop_revision=None, limit=None):
2092
2127
        """Fetch revisions.
2093
2128
 
2094
2129
        :param stop_revision: Last revision to fetch
2095
2130
        :param limit: Optional rough limit of revisions to fetch
2096
 
        :return: FetchResult object
2097
2131
        """
2098
2132
        raise NotImplementedError(self.fetch)
2099
2133
 
2100
 
    def update_references(self):
2101
 
        """Import reference information from source to target.
2102
 
        """
2103
 
        raise NotImplementedError(self.update_references)
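
To tie the abstract API above together: callers normally go through Branch.pull() and Branch.push(), which delegate to the best registered InterBranch for the (source, target) pair; the same optimiser can be obtained directly with InterBranch.get(). A rough sketch, assuming `source` and `target` are already-open Branch objects:

from breezy.branch import InterBranch

# Branch-level call (the usual entry point):
result = target.pull(source, overwrite=False)

# Equivalent lookup of the optimiser that performs the work:
inter = InterBranch.get(source, target)
print(type(inter).__name__, result.old_revno, result.new_revno)
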
2104
 
 
2105
2134
 
2106
2135
def _fix_overwrite_type(overwrite):
2107
2136
    if isinstance(overwrite, bool):
2131
2160
            return format._custom_format
2132
2161
        return format
2133
2162
 
2134
 
    def copy_content_into(self, revision_id=None, tag_selector=None):
 
2163
    @needs_write_lock
 
2164
    def copy_content_into(self, revision_id=None):
2135
2165
        """Copy the content of source into target
2136
2166
 
2137
2167
        revision_id: if not None, the revision history in the new branch will
2138
2168
                     be truncated to end with revision_id.
2139
2169
        """
2140
 
        with self.source.lock_read(), self.target.lock_write():
2141
 
            self.source._synchronize_history(self.target, revision_id)
2142
 
            self.update_references()
2143
 
            try:
2144
 
                parent = self.source.get_parent()
2145
 
            except errors.InaccessibleParent as e:
2146
 
                mutter('parent was not accessible to copy: %s', str(e))
2147
 
            else:
2148
 
                if parent:
2149
 
                    self.target.set_parent(parent)
2150
 
            if self.source._push_should_merge_tags():
2151
 
                self.source.tags.merge_to(self.target.tags, selector=tag_selector)
 
2170
        self.source.update_references(self.target)
 
2171
        self.source._synchronize_history(self.target, revision_id)
 
2172
        try:
 
2173
            parent = self.source.get_parent()
 
2174
        except errors.InaccessibleParent as e:
 
2175
            mutter('parent was not accessible to copy: %s', e)
 
2176
        else:
 
2177
            if parent:
 
2178
                self.target.set_parent(parent)
 
2179
        if self.source._push_should_merge_tags():
 
2180
            self.source.tags.merge_to(self.target.tags)
2152
2181
 
2153
 
    def fetch(self, stop_revision=None, limit=None, lossy=False):
 
2182
    @needs_write_lock
 
2183
    def fetch(self, stop_revision=None, limit=None):
2154
2184
        if self.target.base == self.source.base:
2155
2185
            return (0, [])
2156
 
        with self.source.lock_read(), self.target.lock_write():
 
2186
        self.source.lock_read()
 
2187
        try:
2157
2188
            fetch_spec_factory = fetch.FetchSpecFactory()
2158
2189
            fetch_spec_factory.source_branch = self.source
2159
2190
            fetch_spec_factory.source_branch_stop_revision_id = stop_revision
2160
2191
            fetch_spec_factory.source_repo = self.source.repository
2161
2192
            fetch_spec_factory.target_repo = self.target.repository
2162
 
            fetch_spec_factory.target_repo_kind = (
2163
 
                fetch.TargetRepoKinds.PREEXISTING)
 
2193
            fetch_spec_factory.target_repo_kind = fetch.TargetRepoKinds.PREEXISTING
2164
2194
            fetch_spec_factory.limit = limit
2165
2195
            fetch_spec = fetch_spec_factory.make_fetch_spec()
2166
 
            return self.target.repository.fetch(
2167
 
                self.source.repository,
2168
 
                lossy=lossy,
 
2196
            return self.target.repository.fetch(self.source.repository,
2169
2197
                fetch_spec=fetch_spec)
 
2198
        finally:
 
2199
            self.source.unlock()
2170
2200
 
 
2201
    @needs_write_lock
2171
2202
    def _update_revisions(self, stop_revision=None, overwrite=False,
2172
 
                          graph=None):
2173
 
        with self.source.lock_read(), self.target.lock_write():
2174
 
            other_revno, other_last_revision = self.source.last_revision_info()
2175
 
            stop_revno = None  # unknown
2176
 
            if stop_revision is None:
2177
 
                stop_revision = other_last_revision
2178
 
                if _mod_revision.is_null(stop_revision):
2179
 
                    # if there are no commits, we're done.
2180
 
                    return
2181
 
                stop_revno = other_revno
 
2203
            graph=None):
 
2204
        other_revno, other_last_revision = self.source.last_revision_info()
 
2205
        stop_revno = None # unknown
 
2206
        if stop_revision is None:
 
2207
            stop_revision = other_last_revision
 
2208
            if _mod_revision.is_null(stop_revision):
 
2209
                # if there are no commits, we're done.
 
2210
                return
 
2211
            stop_revno = other_revno
2182
2212
 
2183
 
            # what's the current last revision, before we fetch [and change it
2184
 
            # possibly]
2185
 
            last_rev = _mod_revision.ensure_null(self.target.last_revision())
2186
 
            # we fetch here so that we don't process data twice in the common
2187
 
            # case of having something to pull, and so that the check for
2188
 
            # already merged can operate on the just fetched graph, which will
2189
 
            # be cached in memory.
2190
 
            self.fetch(stop_revision=stop_revision)
2191
 
            # Check to see if one is an ancestor of the other
2192
 
            if not overwrite:
2193
 
                if graph is None:
2194
 
                    graph = self.target.repository.get_graph()
2195
 
                if self.target._check_if_descendant_or_diverged(
2196
 
                        stop_revision, last_rev, graph, self.source):
2197
 
                    # stop_revision is a descendant of last_rev, but we aren't
2198
 
                    # overwriting, so we're done.
2199
 
                    return
2200
 
            if stop_revno is None:
2201
 
                if graph is None:
2202
 
                    graph = self.target.repository.get_graph()
2203
 
                this_revno, this_last_revision = \
 
2213
        # what's the current last revision, before we fetch [and change it
 
2214
        # possibly]
 
2215
        last_rev = _mod_revision.ensure_null(self.target.last_revision())
 
2216
        # we fetch here so that we don't process data twice in the common
 
2217
        # case of having something to pull, and so that the check for
 
2218
        # already merged can operate on the just fetched graph, which will
 
2219
        # be cached in memory.
 
2220
        self.fetch(stop_revision=stop_revision)
 
2221
        # Check to see if one is an ancestor of the other
 
2222
        if not overwrite:
 
2223
            if graph is None:
 
2224
                graph = self.target.repository.get_graph()
 
2225
            if self.target._check_if_descendant_or_diverged(
 
2226
                    stop_revision, last_rev, graph, self.source):
 
2227
                # stop_revision is a descendant of last_rev, but we aren't
 
2228
                # overwriting, so we're done.
 
2229
                return
 
2230
        if stop_revno is None:
 
2231
            if graph is None:
 
2232
                graph = self.target.repository.get_graph()
 
2233
            this_revno, this_last_revision = \
2204
2234
                    self.target.last_revision_info()
2205
 
                stop_revno = graph.find_distance_to_null(
2206
 
                    stop_revision, [(other_last_revision, other_revno),
2207
 
                                    (this_last_revision, this_revno)])
2208
 
            self.target.set_last_revision_info(stop_revno, stop_revision)
 
2235
            stop_revno = graph.find_distance_to_null(stop_revision,
 
2236
                            [(other_last_revision, other_revno),
 
2237
                             (this_last_revision, this_revno)])
 
2238
        self.target.set_last_revision_info(stop_revno, stop_revision)
2209
2239
 
 
2240
    @needs_write_lock
2210
2241
    def pull(self, overwrite=False, stop_revision=None,
2211
2242
             possible_transports=None, run_hooks=True,
2212
 
             _override_hook_target=None, local=False,
2213
 
             tag_selector=None):
 
2243
             _override_hook_target=None, local=False):
2214
2244
        """Pull from source into self, updating my master if any.
2215
2245
 
2216
2246
        :param run_hooks: Private parameter - if false, this branch
2217
2247
            is being called because it's the master of the primary branch,
2218
2248
            so it should not run its hooks.
2219
2249
        """
2220
 
        with contextlib.ExitStack() as exit_stack:
2221
 
            exit_stack.enter_context(self.target.lock_write())
2222
 
            bound_location = self.target.get_bound_location()
2223
 
            if local and not bound_location:
2224
 
                raise errors.LocalRequiresBoundBranch()
2225
 
            master_branch = None
2226
 
            source_is_master = False
2227
 
            if bound_location:
2228
 
                # bound_location comes from a config file, some care has to be
2229
 
                # taken to relate it to source.user_url
2230
 
                normalized = urlutils.normalize_url(bound_location)
2231
 
                try:
2232
 
                    relpath = self.source.user_transport.relpath(normalized)
2233
 
                    source_is_master = (relpath == '')
2234
 
                except (errors.PathNotChild, urlutils.InvalidURL):
2235
 
                    source_is_master = False
2236
 
            if not local and bound_location and not source_is_master:
2237
 
                # not pulling from master, so we need to update master.
2238
 
                master_branch = self.target.get_master_branch(
2239
 
                    possible_transports)
2240
 
                exit_stack.enter_context(master_branch.lock_write())
 
2250
        bound_location = self.target.get_bound_location()
 
2251
        if local and not bound_location:
 
2252
            raise errors.LocalRequiresBoundBranch()
 
2253
        master_branch = None
 
2254
        source_is_master = False
 
2255
        if bound_location:
 
2256
            # bound_location comes from a config file, some care has to be
 
2257
            # taken to relate it to source.user_url
 
2258
            normalized = urlutils.normalize_url(bound_location)
 
2259
            try:
 
2260
                relpath = self.source.user_transport.relpath(normalized)
 
2261
                source_is_master = (relpath == '')
 
2262
            except (errors.PathNotChild, urlutils.InvalidURL):
 
2263
                source_is_master = False
 
2264
        if not local and bound_location and not source_is_master:
 
2265
            # not pulling from master, so we need to update master.
 
2266
            master_branch = self.target.get_master_branch(possible_transports)
 
2267
            master_branch.lock_write()
 
2268
        try:
2241
2269
            if master_branch:
2242
2270
                # pull from source into master.
2243
 
                master_branch.pull(
2244
 
                    self.source, overwrite, stop_revision, run_hooks=False,
2245
 
                    tag_selector=tag_selector)
2246
 
            return self._pull(
2247
 
                overwrite, stop_revision, _hook_master=master_branch,
 
2271
                master_branch.pull(self.source, overwrite, stop_revision,
 
2272
                    run_hooks=False)
 
2273
            return self._pull(overwrite,
 
2274
                stop_revision, _hook_master=master_branch,
2248
2275
                run_hooks=run_hooks,
2249
2276
                _override_hook_target=_override_hook_target,
2250
 
                merge_tags_to_master=not source_is_master,
2251
 
                tag_selector=tag_selector)
 
2277
                merge_tags_to_master=not source_is_master)
 
2278
        finally:
 
2279
            if master_branch:
 
2280
                master_branch.unlock()
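
The bound-location handling above is what backs pulling in a heavyweight checkout: unless local=True, the master named by the bound location is updated first. A hedged sketch of the caller-visible behaviour (URLs are placeholders, and local pulls are only allowed on bound branches):

from breezy import errors
from breezy.branch import Branch

checkout_branch = Branch.open('file:///path/to/heavy-checkout')
upstream = Branch.open('file:///path/to/upstream')
try:
    # Update only this branch, leaving the bound master alone.
    checkout_branch.pull(upstream, local=True)
except errors.LocalRequiresBoundBranch:
    # Not bound: fall back to an ordinary pull.
    checkout_branch.pull(upstream)
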
2252
2281
 
2253
2282
    def push(self, overwrite=False, stop_revision=None, lossy=False,
2254
 
             _override_hook_source_branch=None, tag_selector=None):
 
2283
             _override_hook_source_branch=None):
2255
2284
        """See InterBranch.push.
2256
2285
 
2257
2286
        This is the basic concrete implementation of push()
2266
2295
        # TODO: Public option to disable running hooks - should be trivial but
2267
2296
        # needs tests.
2268
2297
 
2269
 
        def _run_hooks():
2270
 
            if _override_hook_source_branch:
2271
 
                result.source_branch = _override_hook_source_branch
2272
 
            for hook in Branch.hooks['post_push']:
2273
 
                hook(result)
2274
 
 
2275
 
        with self.source.lock_read(), self.target.lock_write():
2276
 
            bound_location = self.target.get_bound_location()
2277
 
            if bound_location and self.target.base != bound_location:
2278
 
                # there is a master branch.
2279
 
                #
2280
 
                # XXX: Why the second check?  Is it even supported for a branch
2281
 
                # to be bound to itself? -- mbp 20070507
2282
 
                master_branch = self.target.get_master_branch()
2283
 
                with master_branch.lock_write():
2284
 
                    # push into the master from the source branch.
2285
 
                    master_inter = InterBranch.get(self.source, master_branch)
2286
 
                    master_inter._basic_push(
2287
 
                        overwrite, stop_revision, tag_selector=tag_selector)
2288
 
                    # and push into the target branch from the source. Note
2289
 
                    # that we push from the source branch again, because it's
2290
 
                    # considered the highest bandwidth repository.
2291
 
                    result = self._basic_push(
2292
 
                        overwrite, stop_revision, tag_selector=tag_selector)
2293
 
                    result.master_branch = master_branch
2294
 
                    result.local_branch = self.target
2295
 
                    _run_hooks()
2296
 
            else:
2297
 
                master_branch = None
2298
 
                # no master branch
2299
 
                result = self._basic_push(
2300
 
                    overwrite, stop_revision, tag_selector=tag_selector)
2301
 
                # TODO: Why set master_branch and local_branch if there's no
2302
 
                # binding?  Maybe cleaner to just leave them unset? -- mbp
2303
 
                # 20070504
2304
 
                result.master_branch = self.target
2305
 
                result.local_branch = None
2306
 
                _run_hooks()
2307
 
            return result
2308
 
 
2309
 
    def _basic_push(self, overwrite, stop_revision, tag_selector=None):
 
2298
        op = cleanup.OperationWithCleanups(self._push_with_bound_branches)
 
2299
        op.add_cleanup(self.source.lock_read().unlock)
 
2300
        op.add_cleanup(self.target.lock_write().unlock)
 
2301
        return op.run(overwrite, stop_revision,
 
2302
            _override_hook_source_branch=_override_hook_source_branch)
 
2303
 
 
2304
    def _basic_push(self, overwrite, stop_revision):
2310
2305
        """Basic implementation of push without bound branches or hooks.
2311
2306
 
2312
2307
        Must be called with source read locked and target write locked.
2315
2310
        result.source_branch = self.source
2316
2311
        result.target_branch = self.target
2317
2312
        result.old_revno, result.old_revid = self.target.last_revision_info()
 
2313
        self.source.update_references(self.target)
2318
2314
        overwrite = _fix_overwrite_type(overwrite)
2319
2315
        if result.old_revid != stop_revision:
2320
2316
            # We assume that during 'push' this repository is closer than
2321
2317
            # the target.
2322
2318
            graph = self.source.repository.get_graph(self.target.repository)
2323
 
            self._update_revisions(
2324
 
                stop_revision, overwrite=("history" in overwrite), graph=graph)
 
2319
            self._update_revisions(stop_revision,
 
2320
                overwrite=("history" in overwrite),
 
2321
                graph=graph)
2325
2322
        if self.source._push_should_merge_tags():
2326
2323
            result.tag_updates, result.tag_conflicts = (
2327
2324
                self.source.tags.merge_to(
2328
 
                    self.target.tags, "tags" in overwrite, selector=tag_selector))
2329
 
        self.update_references()
 
2325
                self.target.tags, "tags" in overwrite))
2330
2326
        result.new_revno, result.new_revid = self.target.last_revision_info()
2331
2327
        return result
2332
2328
 
 
2329
    def _push_with_bound_branches(self, operation, overwrite, stop_revision,
 
2330
            _override_hook_source_branch=None):
 
2331
        """Push from source into target, and into target's master if any.
 
2332
        """
 
2333
        def _run_hooks():
 
2334
            if _override_hook_source_branch:
 
2335
                result.source_branch = _override_hook_source_branch
 
2336
            for hook in Branch.hooks['post_push']:
 
2337
                hook(result)
 
2338
 
 
2339
        bound_location = self.target.get_bound_location()
 
2340
        if bound_location and self.target.base != bound_location:
 
2341
            # there is a master branch.
 
2342
            #
 
2343
            # XXX: Why the second check?  Is it even supported for a branch to
 
2344
            # be bound to itself? -- mbp 20070507
 
2345
            master_branch = self.target.get_master_branch()
 
2346
            master_branch.lock_write()
 
2347
            operation.add_cleanup(master_branch.unlock)
 
2348
            # push into the master from the source branch.
 
2349
            master_inter = InterBranch.get(self.source, master_branch)
 
2350
            master_inter._basic_push(overwrite, stop_revision)
 
2351
            # and push into the target branch from the source. Note that
 
2352
            # we push from the source branch again, because it's considered
 
2353
            # the highest bandwidth repository.
 
2354
            result = self._basic_push(overwrite, stop_revision)
 
2355
            result.master_branch = master_branch
 
2356
            result.local_branch = self.target
 
2357
        else:
 
2358
            master_branch = None
 
2359
            # no master branch
 
2360
            result = self._basic_push(overwrite, stop_revision)
 
2361
            # TODO: Why set master_branch and local_branch if there's no
 
2362
            # binding?  Maybe cleaner to just leave them unset? -- mbp
 
2363
            # 20070504
 
2364
            result.master_branch = self.target
 
2365
            result.local_branch = None
 
2366
        _run_hooks()
 
2367
        return result
 
2368
 
2333
2369
    def _pull(self, overwrite=False, stop_revision=None,
2334
 
              possible_transports=None, _hook_master=None, run_hooks=True,
2335
 
              _override_hook_target=None, local=False,
2336
 
              merge_tags_to_master=True, tag_selector=None):
 
2370
             possible_transports=None, _hook_master=None, run_hooks=True,
 
2371
             _override_hook_target=None, local=False,
 
2372
             merge_tags_to_master=True):
2337
2373
        """See Branch.pull.
2338
2374
 
2339
2375
        This function is the core worker, used by GenericInterBranch.pull to
2359
2395
            result.target_branch = self.target
2360
2396
        else:
2361
2397
            result.target_branch = _override_hook_target
2362
 
        with self.source.lock_read():
 
2398
        self.source.lock_read()
 
2399
        try:
2363
2400
            # We assume that during 'pull' the target repository is closer than
2364
2401
            # the source one.
 
2402
            self.source.update_references(self.target)
2365
2403
            graph = self.target.repository.get_graph(self.source.repository)
2366
 
            # TODO: Branch formats should have a flag that indicates
 
2404
            # TODO: Branch formats should have a flag that indicates 
2367
2405
            # that revno's are expensive, and pull() should honor that flag.
2368
2406
            # -- JRV20090506
2369
2407
            result.old_revno, result.old_revid = \
2370
2408
                self.target.last_revision_info()
2371
2409
            overwrite = _fix_overwrite_type(overwrite)
2372
 
            self._update_revisions(
2373
 
                stop_revision, overwrite=("history" in overwrite), graph=graph)
2374
 
            # TODO: The old revid should be specified when merging tags,
2375
 
            # so a tags implementation that versions tags can only
 
2410
            self._update_revisions(stop_revision,
 
2411
                overwrite=("history" in overwrite),
 
2412
                graph=graph)
 
2413
            # TODO: The old revid should be specified when merging tags, 
 
2414
            # so a tags implementation that versions tags can only 
2376
2415
            # pull in the most recent changes. -- JRV20090506
2377
2416
            result.tag_updates, result.tag_conflicts = (
2378
 
                self.source.tags.merge_to(
2379
 
                    self.target.tags, "tags" in overwrite,
2380
 
                    ignore_master=not merge_tags_to_master,
2381
 
                    selector=tag_selector))
2382
 
            self.update_references()
2383
 
            result.new_revno, result.new_revid = (
2384
 
                self.target.last_revision_info())
 
2417
                self.source.tags.merge_to(self.target.tags,
 
2418
                    "tags" in overwrite,
 
2419
                    ignore_master=not merge_tags_to_master))
 
2420
            result.new_revno, result.new_revid = self.target.last_revision_info()
2385
2421
            if _hook_master:
2386
2422
                result.master_branch = _hook_master
2387
2423
                result.local_branch = result.target_branch
2391
2427
            if run_hooks:
2392
2428
                for hook in Branch.hooks['post_pull']:
2393
2429
                    hook(result)
2394
 
            return result
2395
 
 
2396
 
    def update_references(self):
2397
 
        if not getattr(self.source._format, 'supports_reference_locations', False):
2398
 
            return
2399
 
        reference_dict = self.source._get_all_reference_info()
2400
 
        if len(reference_dict) == 0:
2401
 
            return
2402
 
        old_base = self.source.base
2403
 
        new_base = self.target.base
2404
 
        target_reference_dict = self.target._get_all_reference_info()
2405
 
        for tree_path, (branch_location, file_id) in reference_dict.items():
2406
 
            try:
2407
 
                branch_location = urlutils.rebase_url(branch_location,
2408
 
                                                      old_base, new_base)
2409
 
            except urlutils.InvalidRebaseURLs:
2410
 
                # Fall back to absolute URL
2411
 
                branch_location = urlutils.join(old_base, branch_location)
2412
 
            target_reference_dict.setdefault(
2413
 
                tree_path, (branch_location, file_id))
2414
 
        self.target._set_all_reference_info(target_reference_dict)
 
2430
        finally:
 
2431
            self.source.unlock()
 
2432
        return result
2415
2433
 
2416
2434
 
2417
2435
InterBranch.register_optimiser(GenericInterBranch)
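
A plugin that wants special-cased branch-to-branch transfers would follow the same pattern: subclass InterBranch (or GenericInterBranch), claim the pairs it understands in is_compatible(), and register it alongside the generic optimiser. The class below is a hypothetical sketch, not part of this change:

class ExampleInterBranch(GenericInterBranch):
    """Hypothetical optimiser for one specific pair of branch formats."""

    @classmethod
    def is_compatible(klass, source, target):
        # Only claim pairs this optimiser actually understands;
        # InterBranch.get() falls back to GenericInterBranch otherwise.
        return False


InterBranch.register_optimiser(ExampleInterBranch)
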