# Copyright (C) 2005-2012 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from __future__ import absolute_import

from .lazy_import import lazy_import
lazy_import(globals(), """
    config as _mod_config,
    revision as _mod_revision,
from breezy.bzr import (
from breezy.i18n import gettext, ngettext

from .hooks import Hooks
from .inter import InterObject
from .lock import LogicalLockResult
from .trace import mutter, mutter_callsite, note, is_quiet, warning


class UnstackableBranchFormat(errors.BzrError):

    _fmt = ("The branch '%(url)s'(%(format)s) is not a stackable format. "
            "You will need to upgrade the branch to permit branch stacking.")

    def __init__(self, format, url):
        errors.BzrError.__init__(self)


class Branch(controldir.ControlComponent):

from warnings import warn
from cStringIO import StringIO

from bzrlib.inventory import InventoryEntry
import bzrlib.inventory as inventory
from bzrlib.trace import mutter, note
from bzrlib.osutils import (isdir, quotefn, compact_date, rand_bytes,
                            rename, splitpath, sha_file, appendpath,
import bzrlib.errors as errors
from bzrlib.errors import (BzrError, InvalidRevisionNumber, InvalidRevisionId,
                           NoSuchRevision, HistoryMissing, NotBranchError,
                           DivergedBranches, LockError, UnlistableStore,
                           UnlistableBranch, NoSuchFile, NotVersionedError,
from bzrlib.textui import show_status
from bzrlib.revision import (Revision, is_ancestor, get_intervening_revisions,
from bzrlib.delta import compare_trees
from bzrlib.tree import EmptyTree, RevisionTree
from bzrlib.inventory import Inventory
from bzrlib.store import copy_all
from bzrlib.store.text import TextStore
from bzrlib.store.weave import WeaveStore
from bzrlib.testament import Testament
import bzrlib.transactions as transactions
from bzrlib.transport import Transport, get_transport
from config import TreeConfig


BZR_BRANCH_FORMAT_4 = "Bazaar-NG branch, format 0.0.4\n"
BZR_BRANCH_FORMAT_5 = "Bazaar-NG branch, format 5\n"
BZR_BRANCH_FORMAT_6 = "Bazaar-NG branch, format 6\n"

## TODO: Maybe include checks for common corruption of newlines, etc?

# TODO: Some operations like log might retrieve the same revisions
# repeatedly to calculate deltas.  We could perhaps have a weakref
# cache in memory to make this faster.  In general anything can be
# cached in memory between lock and unlock operations.


def find_branch(*ignored, **ignored_too):
    # XXX: leave this here for about one release, then remove it
    raise NotImplementedError('find_branch() is not supported anymore, '
                              'please use one of the new branch constructors')


def needs_read_lock(unbound):
    """Decorate unbound to take out and release a read lock."""
    def decorated(self, *args, **kwargs):
        return unbound(self, *args, **kwargs)


def needs_write_lock(unbound):
    """Decorate unbound to take out and release a write lock."""
    def decorated(self, *args, **kwargs):
        return unbound(self, *args, **kwargs)
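

# A minimal sketch of how a decorator like needs_read_lock is typically
# completed, assuming the decorated object exposes lock_read()/unlock() as
# Branch does.  Illustrative only, not the original implementation:
#
#   def needs_read_lock(unbound):
#       """Decorate unbound to take out and release a read lock."""
#       def decorated(self, *args, **kwargs):
#           self.lock_read()
#           try:
#               return unbound(self, *args, **kwargs)
#           finally:
#               self.unlock()
#       return decorated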


######################################################################

    """Branch holding a history of revisions.

        Base directory/url of the branch; using control_url and
        control_transport is more standardized.

    :ivar hooks: An instance of BranchHooks.
    :ivar _master_branch_cache: cached result of get_master_branch, see
    """

    # this is really an instance variable - FIXME move it there

    def control_transport(self):
        return self._transport

    def user_transport(self):
        return self.controldir.user_transport

    def __init__(self, possible_transports=None):
        self.tags = self._format.make_tags(self)
        self._revision_history_cache = None
        self._revision_id_to_revno_cache = None
        self._partial_revision_id_to_revno_cache = {}
        self._partial_revision_history_cache = []
        self._last_revision_info_cache = None
        self._master_branch_cache = None
        self._merge_sorted_revisions_cache = None
        self._open_hook(possible_transports)
        hooks = Branch.hooks['open']

    def _open_hook(self, possible_transports):
        """Called by init to allow simpler extension of the base class."""

    def _activate_fallback_location(self, url, possible_transports):
        """Activate the branch/repository from url as a fallback repository."""
        for existing_fallback_repo in self.repository._fallback_repositories:
            if existing_fallback_repo.user_url == url:
                # This fallback is already configured.  This probably only
                # happens because ControlDir.sprout is a horrible mess.  To
                # avoid confusing _unstack we don't add this a second time.
                mutter('duplicate activation of fallback %r on %r', url, self)
        repo = self._get_fallback_repository(url, possible_transports)
        if repo.has_same_location(self.repository):
            raise errors.UnstackableLocationError(self.user_url, url)
        self.repository.add_fallback_repository(repo)

    def break_lock(self):
        """Break a lock if one is present from another instance.

        Uses the ui factory to ask for confirmation if the lock may be from

        This will probe the repository for its lock as well.
        """
        self.control_files.break_lock()
        self.repository.break_lock()
        master = self.get_master_branch()
        if master is not None:
            master.break_lock()

    def _check_stackable_repo(self):
        if not self.repository._format.supports_external_lookups:
            raise errors.UnstackableRepositoryFormat(
                self.repository._format, self.repository.base)

    def _extend_partial_history(self, stop_index=None, stop_revision=None):
        """Extend the partial history to include a given index

        If a stop_index is supplied, stop when that index has been reached.
        If a stop_revision is supplied, stop when that revision is
        encountered.  Otherwise, stop when the beginning of history is
        reached.

        :param stop_index: The index which should be present.  When it is
            present, history extension will stop.
        :param stop_revision: The revision id which should be present.  When
            it is encountered, history extension will stop.
        """
        if len(self._partial_revision_history_cache) == 0:
            self._partial_revision_history_cache = [self.last_revision()]
        repository._iter_for_revno(
            self.repository, self._partial_revision_history_cache,
            stop_index=stop_index, stop_revision=stop_revision)
        if self._partial_revision_history_cache[-1] == \
                _mod_revision.NULL_REVISION:
            self._partial_revision_history_cache.pop()

    def _get_check_refs(self):
        """Get the references needed for check().
        """
        revid = self.last_revision()
        return [('revision-existence', revid), ('lefthand-distance', revid)]

    def open(base, _unsupported=False, possible_transports=None):
        """Open the branch rooted at base.

        For instance, if the branch is at URL/.bzr/branch,
        Branch.open(URL) -> a Branch instance.
        """
        control = controldir.ControlDir.open(
            base, possible_transports=possible_transports,
            _unsupported=_unsupported)
        return control.open_branch(
            unsupported=_unsupported,
            possible_transports=possible_transports)
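
    # Illustrative usage sketch (not part of the original module; the path is
    # hypothetical).  Branch.open() expects a location whose .bzr/branch
    # already exists:
    #
    #   branch = Branch.open('/path/to/existing/branch')
    #   last_revno, last_revid = branch.last_revision_info()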

    def open_from_transport(transport, name=None, _unsupported=False,
                            possible_transports=None):
        """Open the branch rooted at transport"""
        control = controldir.ControlDir.open_from_transport(
            transport, _unsupported)
        return control.open_branch(
            name=name, unsupported=_unsupported,
            possible_transports=possible_transports)

    def open_containing(url, possible_transports=None):
    def __init__(self, *ignored, **ignored_too):
        raise NotImplementedError('The Branch class is abstract')

    def open_downlevel(base):
        """Open a branch which may be of an old format.

        Only local branches are supported."""
        return BzrBranch(get_transport(base), relax_version_check=True)

        """Open an existing branch, rooted at 'base' (url)"""
        t = get_transport(base)
        mutter("trying to open %r with transport %r", base, t)

        """Open an existing branch which contains url.

        This probes for a branch at url, and searches upwards from there.

        Basically we keep looking up until we find the control directory or
        run into the root.  If there isn't one, raises NotBranchError.
        If there is one and it is either an unrecognised format or an
        unsupported format, UnknownFormatError or UnsupportedFormatError are
        raised.  If there is one, it is returned, along with the unused portion
        of url.
        """
        control, relpath = controldir.ControlDir.open_containing(
            url, possible_transports)
        branch = control.open_branch(possible_transports=possible_transports)
        return (branch, relpath)
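
    # Illustrative usage sketch (hypothetical path): open_containing() walks
    # upwards from a location inside a branch and also reports the unused tail
    # of the path, which is handy for commands invoked from a subdirectory.
    #
    #   branch, relpath = Branch.open_containing('/path/to/branch/subdir/file.txt')
    #   # relpath == 'subdir/file.txt' if the branch root is /path/to/branch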

    def _push_should_merge_tags(self):
        """Should _basic_push merge this branch's tags into the target?

        The default implementation returns False if this branch has no tags,
        and True the rest of the time.  Subclasses may override this.
        """
        return self.supports_tags() and self.tags.get_tag_dict()

    def get_config(self):
        """Get a breezy.config.BranchConfig for this Branch.

        This can then be used to get and set configuration options for the
        branch.

        :return: A breezy.config.BranchConfig.
        """
        return _mod_config.BranchConfig(self)

    def get_config_stack(self):
        """Get a breezy.config.BranchStack for this Branch.

        This can then be used to get and set configuration options for the
        branch.

        :return: A breezy.config.BranchStack.
        """
        return _mod_config.BranchStack(self)
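
    # Illustrative sketch (assumed typical usage, not part of this module):
    # reading and writing a branch option through the config stack returned
    # above.  'append_revisions_only' is an option this class itself consults.
    #
    #   conf = branch.get_config_stack()
    #   only_append = conf.get('append_revisions_only')
    #   conf.set('append_revisions_only', True)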

    def store_uncommitted(self, creator):
        """Store uncommitted changes from a ShelfCreator.

        :param creator: The ShelfCreator containing uncommitted changes, or
            None to delete any stored changes.
        :raises: ChangesAlreadyStored if the branch already has changes.
        """
        raise NotImplementedError(self.store_uncommitted)

    def get_unshelver(self, tree):
        """Return a shelf.Unshelver for this branch and tree.

        :param tree: The tree to use to construct the Unshelver.
        :return: an Unshelver or None if no changes are stored.
        """
        raise NotImplementedError(self.get_unshelver)

    def _get_fallback_repository(self, url, possible_transports):
        """Get the repository we fallback to at url."""
        url = urlutils.join(self.base, url)
        a_branch = Branch.open(url, possible_transports=possible_transports)
        return a_branch.repository

    def _get_nick(self, local=False, possible_transports=None):
        config = self.get_config()
        # explicit overrides master, but don't look for master if local is True
        if not local and not config.has_explicit_nickname():
            t = get_transport(url)
            master = self.get_master_branch(possible_transports)
            if master and self.user_url == master.user_url:
                raise errors.RecursiveBind(self.user_url)
            if master is not None:
                # return the master branch value
            except errors.RecursiveBind as e:
            except errors.BzrError as e:
                # Silently fall back to local implicit nick if the master is
                mutter("Could not connect to bound branch, "
                       "falling back to local nick.\n " + str(e))
        return config.get_nickname()
            return BzrBranch(t), t.relpath(url)
        except NotBranchError:
            new_t = t.clone('..')
            if new_t.base == t.base:
                # reached the root, whatever that may be
                raise NotBranchError(path=url)

    def initialize(base):
        """Create a new branch, rooted at 'base' (url)"""
        t = get_transport(base)
        return BzrBranch(t, init=True)

    def setup_caching(self, cache_root):
        """Subclasses that care about caching should override this, and set
        up cached stores located under cache_root.
        """
        self.cache_root = cache_root

        cfg = self.tree_config()
        return cfg.get_option(u"nickname", default=self.base.split('/')[-1])

    def _set_nick(self, nick):
        self.get_config().set_user_option('nickname', nick, warn_masked=True)
        cfg = self.tree_config()
        cfg.set_option(nick, "nickname")
        assert cfg.get_option("nickname") == nick

    nick = property(_get_nick, _set_nick)

        raise NotImplementedError(self.is_locked)

    def _lefthand_history(self, revision_id, last_rev=None,
                          other_branch=None):
        if 'evil' in debug.debug_flags:
            mutter_callsite(4, "_lefthand_history scales with history.")
        # stop_revision must be a descendant of last_revision
        graph = self.repository.get_graph()
        if last_rev is not None:
            if not graph.is_ancestor(last_rev, revision_id):
                # our previous tip is not merged into stop_revision
                raise errors.DivergedBranches(self, other_branch)
        # make a new revision history from the graph
        parents_map = graph.get_parent_map([revision_id])
        if revision_id not in parents_map:
            raise errors.NoSuchRevision(self, revision_id)
        current_rev_id = revision_id
        new_history = []
        check_not_reserved_id = _mod_revision.check_not_reserved_id
        # Do not include ghosts or graph origin in revision_history
        while (current_rev_id in parents_map
               and len(parents_map[current_rev_id]) > 0):
            check_not_reserved_id(current_rev_id)
            new_history.append(current_rev_id)
            current_rev_id = parents_map[current_rev_id][0]
            parents_map = graph.get_parent_map([current_rev_id])
        new_history.reverse()

    def lock_write(self, token=None):
        """Lock the branch for write operations.

        :param token: A token to permit reacquiring a previously held and
        :return: A BranchWriteLockResult.
        """
        raise NotImplementedError(self.lock_write)

    def push_stores(self, branch_to):
        """Copy the content of this branch's store to branch_to."""
        raise NotImplementedError('push_stores is abstract')

    def get_transaction(self):
        """Return the current active transaction.

        If no transaction is active, this returns a passthrough object
        for which all data is immediately flushed and no caching happens.
        """
        raise NotImplementedError('get_transaction is abstract')

    def lock_write(self):
        raise NotImplementedError('lock_write is abstract')

    def lock_read(self):
        """Lock the branch for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        raise NotImplementedError(self.lock_read)
        raise NotImplementedError('lock_read is abstract')

    def unlock(self):
        raise NotImplementedError(self.unlock)

    def peek_lock_mode(self):
        """Return lock mode for the Branch: 'r', 'w' or None"""
        raise NotImplementedError(self.peek_lock_mode)

    def get_physical_lock_status(self):
        raise NotImplementedError(self.get_physical_lock_status)

    def dotted_revno_to_revision_id(self, revno, _cache_reverse=False):
        """Return the revision_id for a dotted revno.

        :param revno: a tuple like (1,) or (1,1,2)
        :param _cache_reverse: a private parameter enabling storage
           of the reverse mapping in a top level cache. (This should
           only be done in selective circumstances as we want to
           avoid having the mapping cached multiple times.)
        :return: the revision_id
        :raises errors.NoSuchRevision: if the revno doesn't exist
        """
        with self.lock_read():
            rev_id = self._do_dotted_revno_to_revision_id(revno)
            self._partial_revision_id_to_revno_cache[rev_id] = revno

    def _do_dotted_revno_to_revision_id(self, revno):
        """Worker function for dotted_revno_to_revision_id.

        Subclasses should override this if they wish to
        provide a more efficient implementation.
        """
        try:
            return self.get_rev_id(revno[0])
        except errors.RevisionNotPresent as e:
            raise errors.GhostRevisionsHaveNoRevno(revno[0], e.revision_id)
        revision_id_to_revno = self.get_revision_id_to_revno_map()
        revision_ids = [revision_id for revision_id, this_revno
                        in viewitems(revision_id_to_revno)
                        if revno == this_revno]
        if len(revision_ids) == 1:
            return revision_ids[0]
        revno_str = '.'.join(map(str, revno))
        raise errors.NoSuchRevision(self, revno_str)

    def revision_id_to_dotted_revno(self, revision_id):
        """Given a revision id, return its dotted revno.

        :return: a tuple like (1,) or (400,1,3).
        """
        with self.lock_read():
            return self._do_revision_id_to_dotted_revno(revision_id)
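
    # Illustrative sketch (the revision id and dotted revno are hypothetical):
    # the two public methods above are inverses of each other.
    #
    #   revid = branch.dotted_revno_to_revision_id((400, 1, 3))
    #   assert branch.revision_id_to_dotted_revno(revid) == (400, 1, 3)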
390
def _do_revision_id_to_dotted_revno(self, revision_id):
391
"""Worker function for revision_id_to_revno."""
392
# Try the caches if they are loaded
393
result = self._partial_revision_id_to_revno_cache.get(revision_id)
394
if result is not None:
396
if self._revision_id_to_revno_cache:
397
result = self._revision_id_to_revno_cache.get(revision_id)
399
raise errors.NoSuchRevision(self, revision_id)
400
# Try the mainline as it's optimised
402
revno = self.revision_id_to_revno(revision_id)
404
except errors.NoSuchRevision:
405
# We need to load and use the full revno map after all
406
result = self.get_revision_id_to_revno_map().get(revision_id)
408
raise errors.NoSuchRevision(self, revision_id)
411
def get_revision_id_to_revno_map(self):
412
"""Return the revision_id => dotted revno map.
414
This will be regenerated on demand, but will be cached.
416
:return: A dictionary mapping revision_id => dotted revno.
417
This dictionary should not be modified by the caller.
419
if 'evil' in debug.debug_flags:
421
3, "get_revision_id_to_revno_map scales with ancestry.")
422
with self.lock_read():
423
if self._revision_id_to_revno_cache is not None:
424
mapping = self._revision_id_to_revno_cache
426
mapping = self._gen_revno_map()
427
self._cache_revision_id_to_revno(mapping)
428
# TODO: jam 20070417 Since this is being cached, should we be
430
# I would rather not, and instead just declare that users should
431
# not modify the return value.
434
def _gen_revno_map(self):
435
"""Create a new mapping from revision ids to dotted revnos.
437
Dotted revnos are generated based on the current tip in the revision
439
This is the worker function for get_revision_id_to_revno_map, which
440
just caches the return value.
442
:return: A dictionary mapping revision_id => dotted revno.
444
revision_id_to_revno = {
445
rev_id: revno for rev_id, depth, revno, end_of_merge
446
in self.iter_merge_sorted_revisions()}
447
return revision_id_to_revno

    def iter_merge_sorted_revisions(self, start_revision_id=None,
                                    stop_revision_id=None,
                                    stop_rule='exclude', direction='reverse'):
        """Walk the revisions for a branch in merge sorted order.

        Merge sorted order is the output from a merge-aware,
        topological sort, i.e. all parents come before their
        children going forward; the opposite for reverse.

        :param start_revision_id: the revision_id to begin walking from.
            If None, the branch tip is used.
        :param stop_revision_id: the revision_id to terminate the walk
            after. If None, the rest of history is included.
        :param stop_rule: if stop_revision_id is not None, the precise rule
            to use for termination:

            * 'exclude' - leave the stop revision out of the result (default)
            * 'include' - the stop revision is the last item in the result
            * 'with-merges' - include the stop revision and all of its
              merged revisions in the result
            * 'with-merges-without-common-ancestry' - filter out revisions
              that are in both ancestries
        :param direction: either 'reverse' or 'forward':

            * reverse means return the start_revision_id first, i.e.
              start at the most recent revision and go backwards in history
            * forward returns tuples in the opposite order to reverse.
              Note in particular that forward does *not* do any intelligent
              ordering w.r.t. depth as some clients of this API may like.
              (If required, that ought to be done at higher layers.)

        :return: an iterator over (revision_id, depth, revno, end_of_merge)

            * revision_id: the unique id of the revision
            * depth: How many levels of merging deep this node has been
            * revno_sequence: This field provides a sequence of
              revision numbers for all revisions.  The format is:
              (REVNO, BRANCHNUM, BRANCHREVNO).  BRANCHNUM is the number of the
              branch that the revno is on.  From left to right the REVNO numbers
              are the sequence numbers within that branch of the revision.
            * end_of_merge: When True the next node (earlier in history) is
              part of a different merge.
        """
        with self.lock_read():
            # Note: depth and revno values are in the context of the branch so
            # we need the full graph to get stable numbers, regardless of the
            if self._merge_sorted_revisions_cache is None:
                last_revision = self.last_revision()
                known_graph = self.repository.get_known_graph_ancestry(
                self._merge_sorted_revisions_cache = known_graph.merge_sort(
            filtered = self._filter_merge_sorted_revisions(
                self._merge_sorted_revisions_cache, start_revision_id,
                stop_revision_id, stop_rule)
            # Make sure we don't return revisions that are not part of the
            # start_revision_id ancestry.
            filtered = self._filter_start_non_ancestors(filtered)
            if direction == 'reverse':
                return filtered
            if direction == 'forward':
                return reversed(list(filtered))
            raise ValueError('invalid direction %r' % direction)
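
    # Illustrative usage sketch (not part of the original module): walking the
    # merge-sorted history of a branch, newest first, indenting merged
    # revisions by their depth.
    #
    #   with branch.lock_read():
    #       for revid, depth, revno, end_of_merge in \
    #               branch.iter_merge_sorted_revisions():
    #           print('%s%s %s' % ('  ' * depth,
    #                              '.'.join(map(str, revno)), revid))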
517
def _filter_merge_sorted_revisions(self, merge_sorted_revisions,
518
start_revision_id, stop_revision_id,
520
"""Iterate over an inclusive range of sorted revisions."""
521
rev_iter = iter(merge_sorted_revisions)
522
if start_revision_id is not None:
523
for node in rev_iter:
525
if rev_id != start_revision_id:
528
# The decision to include the start or not
529
# depends on the stop_rule if a stop is provided
530
# so pop this node back into the iterator
531
rev_iter = itertools.chain(iter([node]), rev_iter)
533
if stop_revision_id is None:
535
for node in rev_iter:
537
yield (rev_id, node.merge_depth, node.revno,
539
elif stop_rule == 'exclude':
540
for node in rev_iter:
542
if rev_id == stop_revision_id:
544
yield (rev_id, node.merge_depth, node.revno,
546
elif stop_rule == 'include':
547
for node in rev_iter:
549
yield (rev_id, node.merge_depth, node.revno,
551
if rev_id == stop_revision_id:
553
elif stop_rule == 'with-merges-without-common-ancestry':
554
# We want to exclude all revisions that are already part of the
555
# stop_revision_id ancestry.
556
graph = self.repository.get_graph()
557
ancestors = graph.find_unique_ancestors(start_revision_id,
559
for node in rev_iter:
561
if rev_id not in ancestors:
563
yield (rev_id, node.merge_depth, node.revno,
565
elif stop_rule == 'with-merges':
566
stop_rev = self.repository.get_revision(stop_revision_id)
567
if stop_rev.parent_ids:
568
left_parent = stop_rev.parent_ids[0]
570
left_parent = _mod_revision.NULL_REVISION
571
# left_parent is the actual revision we want to stop logging at,
572
# since we want to show the merged revisions after the stop_rev too
573
reached_stop_revision_id = False
574
revision_id_whitelist = []
575
for node in rev_iter:
577
if rev_id == left_parent:
578
# reached the left parent after the stop_revision
580
if (not reached_stop_revision_id
581
or rev_id in revision_id_whitelist):
582
yield (rev_id, node.merge_depth, node.revno,
584
if reached_stop_revision_id or rev_id == stop_revision_id:
585
# only do the merged revs of rev_id from now on
586
rev = self.repository.get_revision(rev_id)
588
reached_stop_revision_id = True
589
revision_id_whitelist.extend(rev.parent_ids)
591
raise ValueError('invalid stop_rule %r' % stop_rule)
593
def _filter_start_non_ancestors(self, rev_iter):
594
# If we started from a dotted revno, we want to consider it as a tip
595
# and don't want to yield revisions that are not part of its
596
# ancestry. Given the order guaranteed by the merge sort, we will see
597
# uninteresting descendants of the first parent of our tip before the
600
first = next(rev_iter)
601
except StopIteration:
603
(rev_id, merge_depth, revno, end_of_merge) = first
606
# We start at a mainline revision so by definition, all others
607
# revisions in rev_iter are ancestors
608
for node in rev_iter:
613
pmap = self.repository.get_parent_map([rev_id])
614
parents = pmap.get(rev_id, [])
616
whitelist.update(parents)
618
# If there is no parents, there is nothing of interest left
620
# FIXME: It's hard to test this scenario here as this code is never
621
# called in that case. -- vila 20100322
624
for (rev_id, merge_depth, revno, end_of_merge) in rev_iter:
626
if rev_id in whitelist:
627
pmap = self.repository.get_parent_map([rev_id])
628
parents = pmap.get(rev_id, [])
629
whitelist.remove(rev_id)
630
whitelist.update(parents)
632
# We've reached the mainline, there is nothing left to
636
# A revision that is not part of the ancestry of our
639
yield (rev_id, merge_depth, revno, end_of_merge)
641
def leave_lock_in_place(self):
642
"""Tell this branch object not to release the physical lock when this
645
If lock_write doesn't return a token, then this method is not
648
self.control_files.leave_in_place()
650
def dont_leave_lock_in_place(self):
651
"""Tell this branch object to release the physical lock when this
652
object is unlocked, even if it didn't originally acquire it.
654
If lock_write doesn't return a token, then this method is not
657
self.control_files.dont_leave_in_place()
659
def bind(self, other):
660
"""Bind the local branch the other branch.
662
:param other: The branch to bind to
665
raise errors.UpgradeRequired(self.user_url)
667
def get_append_revisions_only(self):
668
"""Whether it is only possible to append revisions to the history.
670
if not self._format.supports_set_append_revisions_only():
672
return self.get_config_stack().get('append_revisions_only')
674
def set_append_revisions_only(self, enabled):
675
if not self._format.supports_set_append_revisions_only():
676
raise errors.UpgradeRequired(self.user_url)
677
self.get_config_stack().set('append_revisions_only', enabled)

    def fetch(self, from_branch, stop_revision=None, limit=None, lossy=False):
        """Copy revisions from from_branch into this branch.

        :param from_branch: Where to copy from.
        :param stop_revision: What revision to stop at (None for at the end
            of the branch).
        :param limit: Optional rough limit of revisions to fetch
        """
        with self.lock_write():
            return InterBranch.get(from_branch, self).fetch(
                stop_revision, limit=limit, lossy=lossy)
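
    # Illustrative sketch (assumption: 'other' is another Branch object opened
    # by the caller): copying revisions from one branch into this one.
    #
    #   branch.fetch(other, stop_revision=other.last_revision())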
692
def get_bound_location(self):
693
"""Return the URL of the branch we are bound to.
695
Older format branches cannot bind, please be sure to use a metadir
700
def get_old_bound_location(self):
701
"""Return the URL of the branch we used to be bound to
703
raise errors.UpgradeRequired(self.user_url)
705
def get_commit_builder(self, parents, config_stack=None, timestamp=None,
706
timezone=None, committer=None, revprops=None,
707
revision_id=None, lossy=False):
708
"""Obtain a CommitBuilder for this branch.
710
:param parents: Revision ids of the parents of the new revision.
711
:param config: Optional configuration to use.
712
:param timestamp: Optional timestamp recorded for commit.
713
:param timezone: Optional timezone for timestamp.
714
:param committer: Optional committer to set for commit.
715
:param revprops: Optional dictionary of revision properties.
716
:param revision_id: Optional revision id.
717
:param lossy: Whether to discard data that can not be natively
718
represented, when pushing to a foreign VCS
721
if config_stack is None:
722
config_stack = self.get_config_stack()
724
return self.repository.get_commit_builder(
725
self, parents, config_stack, timestamp, timezone, committer,
726
revprops, revision_id, lossy)
728
def get_master_branch(self, possible_transports=None):
729
"""Return the branch we are bound to.
731
:return: Either a Branch, or None
735
def get_stacked_on_url(self):
736
"""Get the URL this branch is stacked against.
738
:raises NotStacked: If the branch is not stacked.
739
:raises UnstackableBranchFormat: If the branch does not support
742
raise NotImplementedError(self.get_stacked_on_url)
744
def set_last_revision_info(self, revno, revision_id):
745
"""Set the last revision of this branch.
747
The caller is responsible for checking that the revno is correct
748
for this revision id.
750
It may be possible to set the branch last revision to an id not
751
present in the repository. However, branches can also be
752
configured to check constraints on history, in which case this may not
755
raise NotImplementedError(self.set_last_revision_info)
757
def generate_revision_history(self, revision_id, last_rev=None,
759
"""See Branch.generate_revision_history"""
760
with self.lock_write():
761
graph = self.repository.get_graph()
762
(last_revno, last_revid) = self.last_revision_info()
763
known_revision_ids = [
764
(last_revid, last_revno),
765
(_mod_revision.NULL_REVISION, 0),
767
if last_rev is not None:
768
if not graph.is_ancestor(last_rev, revision_id):
769
# our previous tip is not merged into stop_revision
770
raise errors.DivergedBranches(self, other_branch)
771
revno = graph.find_distance_to_null(
772
revision_id, known_revision_ids)
773
self.set_last_revision_info(revno, revision_id)
775
def set_parent(self, url):
776
"""See Branch.set_parent."""
777
# TODO: Maybe delete old location files?
778
# URLs should never be unicode, even on the local fs,
779
# FIXUP this and get_parent in a future branch format bump:
780
# read and rewrite the file. RBC 20060125
782
if isinstance(url, text_type):
785
except UnicodeEncodeError:
786
raise urlutils.InvalidURL(
787
url, "Urls must be 7-bit ascii, "
788
"use breezy.urlutils.escape")
789
url = urlutils.relative_url(self.base, url)
790
with self.lock_write():
791
self._set_parent_location(url)
793
def set_stacked_on_url(self, url):
794
"""Set the URL this branch is stacked against.
796
:raises UnstackableBranchFormat: If the branch does not support
798
:raises UnstackableRepositoryFormat: If the repository does not support
801
if not self._format.supports_stacking():
802
raise UnstackableBranchFormat(self._format, self.user_url)
803
with self.lock_write():
804
# XXX: Changing from one fallback repository to another does not
805
# check that all the data you need is present in the new fallback.
806
# Possibly it should.
807
self._check_stackable_repo()
810
self.get_stacked_on_url()
811
except (errors.NotStacked, UnstackableBranchFormat,
812
errors.UnstackableRepositoryFormat):
816
self._activate_fallback_location(
817
url, possible_transports=[self.controldir.root_transport])
818
# write this out after the repository is stacked to avoid setting a
819
# stacked config that doesn't work.
820
self._set_config_location('stacked_on_location', url)
823
"""Change a branch to be unstacked, copying data as needed.
825
Don't call this directly, use set_stacked_on_url(None).
827
with ui.ui_factory.nested_progress_bar() as pb:
828
pb.update(gettext("Unstacking"))
829
# The basic approach here is to fetch the tip of the branch,
830
# including all available ghosts, from the existing stacked
831
# repository into a new repository object without the fallbacks.
833
# XXX: See <https://launchpad.net/bugs/397286> - this may not be
834
# correct for CHKMap repositories
835
old_repository = self.repository
836
if len(old_repository._fallback_repositories) != 1:
837
raise AssertionError(
838
"can't cope with fallback repositories "
839
"of %r (fallbacks: %r)" % (
840
old_repository, old_repository._fallback_repositories))
841
# Open the new repository object.
842
# Repositories don't offer an interface to remove fallback
843
# repositories today; take the conceptually simpler option and just
844
# reopen it. We reopen it starting from the URL so that we
845
# get a separate connection for RemoteRepositories and can
846
# stream from one of them to the other. This does mean doing
847
# separate SSH connection setup, but unstacking is not a
848
# common operation so it's tolerable.
849
new_bzrdir = controldir.ControlDir.open(
850
self.controldir.root_transport.base)
851
new_repository = new_bzrdir.find_repository()
852
if new_repository._fallback_repositories:
853
raise AssertionError(
854
"didn't expect %r to have fallback_repositories"
855
% (self.repository,))
856
# Replace self.repository with the new repository.
857
# Do our best to transfer the lock state (i.e. lock-tokens and
858
# lock count) of self.repository to the new repository.
859
lock_token = old_repository.lock_write().repository_token
860
self.repository = new_repository
861
if isinstance(self, remote.RemoteBranch):
862
# Remote branches can have a second reference to the old
863
# repository that need to be replaced.
864
if self._real_branch is not None:
865
self._real_branch.repository = new_repository
866
self.repository.lock_write(token=lock_token)
867
if lock_token is not None:
868
old_repository.leave_lock_in_place()
869
old_repository.unlock()
870
if lock_token is not None:
871
# XXX: self.repository.leave_lock_in_place() before this
872
# function will not be preserved. Fortunately that doesn't
873
# affect the current default format (2a), and would be a
874
# corner-case anyway.
875
# - Andrew Bennetts, 2010/06/30
876
self.repository.dont_leave_lock_in_place()
880
old_repository.unlock()
881
except errors.LockNotHeld:
884
if old_lock_count == 0:
885
raise AssertionError(
886
'old_repository should have been locked at least once.')
887
for i in range(old_lock_count - 1):
888
self.repository.lock_write()
            # Fetch from the old repository into the new.
            with old_repository.lock_read():
                # XXX: If you unstack a branch while it has a working tree
                # with a pending merge, the pending-merged revisions will no
                # longer be present.  You can (probably) revert and remerge.
                try:
                    tags_to_fetch = set(self.tags.get_reverse_tag_dict())
                except errors.TagsNotSupported:
                    tags_to_fetch = set()
                fetch_spec = vf_search.NotInOtherForRevs(
                    self.repository, old_repository,
                    required_ids=[self.last_revision()],
                    if_present_ids=tags_to_fetch, find_ghosts=True).execute()
                self.repository.fetch(old_repository, fetch_spec=fetch_spec)
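
    # Illustrative sketch (not part of the original module): callers never
    # invoke _unstack() directly; unstacking is requested through the public
    # setter, which copies any now-missing data as shown above.
    #
    #   branch.set_stacked_on_url(None)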
904
def _cache_revision_history(self, rev_history):
905
"""Set the cached revision history to rev_history.
907
The revision_history method will use this cache to avoid regenerating
908
the revision history.
910
This API is semi-public; it is only for use by subclasses, all other code
should consider it to be private.
913
self._revision_history_cache = rev_history
915
def _cache_revision_id_to_revno(self, revision_id_to_revno):
916
"""Set the cached revision_id => revno map to revision_id_to_revno.
918
This API is semi-public; it is only for use by subclasses, all other code
should consider it to be private.
921
self._revision_id_to_revno_cache = revision_id_to_revno
923
def _clear_cached_state(self):
924
"""Clear any cached data on this branch, e.g. cached revision history.
926
This means the next call to revision_history will need to call
927
_gen_revision_history.
929
This API is semi-public; it is only for use by subclasses, all other
930
code should consider it to be private.
932
self._revision_history_cache = None
933
self._revision_id_to_revno_cache = None
934
self._last_revision_info_cache = None
935
self._master_branch_cache = None
936
self._merge_sorted_revisions_cache = None
937
self._partial_revision_history_cache = []
938
self._partial_revision_id_to_revno_cache = {}
940
def _gen_revision_history(self):
941
"""Return sequence of revision hashes on to this branch.
943
Unlike revision_history, this method always regenerates or rereads the
944
revision history, i.e. it does not cache the result, so repeated calls
947
Concrete subclasses should override this instead of revision_history so
948
that subclasses do not need to deal with caching logic.
950
This API is semi-public; it is only for use by subclasses, all other code
should consider it to be private.
953
raise NotImplementedError(self._gen_revision_history)
955
def _revision_history(self):
956
if 'evil' in debug.debug_flags:
957
mutter_callsite(3, "revision_history scales with history.")
958
if self._revision_history_cache is not None:
959
history = self._revision_history_cache
961
history = self._gen_revision_history()
962
self._cache_revision_history(history)
187
raise NotImplementedError('unlock is abstract')
189
def abspath(self, name):
190
"""Return absolute filename for something in the branch
192
XXX: Robert Collins 20051017 what is this used for? why is it a branch
193
method and not a tree method.
195
raise NotImplementedError('abspath is abstract')
197
def controlfilename(self, file_or_path):
198
"""Return location relative to branch."""
199
raise NotImplementedError('controlfilename is abstract')
201
def controlfile(self, file_or_path, mode='r'):
202
"""Open a control file for this branch.
204
There are two classes of file in the control directory: text
205
and binary. binary files are untranslated byte streams. Text
206
control files are stored with Unix newlines and in UTF-8, even
207
if the platform or locale defaults are different.
209
Controlfiles should almost never be opened in write mode but
210
rather should be atomically copied and replaced using atomicfile.
212
raise NotImplementedError('controlfile is abstract')
214
def put_controlfile(self, path, f, encode=True):
215
"""Write an entry as a controlfile.
217
:param path: The path to put the file, relative to the .bzr control
219
:param f: A file-like or string object whose contents should be copied.
220
:param encode: If true, encode the contents as utf-8
222
raise NotImplementedError('put_controlfile is abstract')
224
def put_controlfiles(self, files, encode=True):
225
"""Write several entries as controlfiles.
227
:param files: A list of [(path, file)] pairs, where the path is the directory
228
underneath the bzr control directory
229
:param encode: If true, encode the contents as utf-8
231
raise NotImplementedError('put_controlfiles is abstract')
233
def get_root_id(self):
234
"""Return the id of this branches root"""
235
raise NotImplementedError('get_root_id is abstract')
237
def set_root_id(self, file_id):
238
raise NotImplementedError('set_root_id is abstract')
240
def add(self, files, ids=None):
241
"""Make files versioned.
243
Note that the command line normally calls smart_add instead,
244
which can automatically recurse.
246
This puts the files in the Added state, so that they will be
247
recorded by the next commit.
250
List of paths to add, relative to the base of the tree.
253
If set, use these instead of automatically generated ids.
254
Must be the same length as the list of files, but may
255
contain None for ids that are to be autogenerated.
257
TODO: Perhaps have an option to add the ids even if the files do
260
TODO: Perhaps yield the ids and paths as they're added.
262
raise NotImplementedError('add is abstract')
264
def print_file(self, file, revno):
265
"""Print `file` to stdout."""
266
raise NotImplementedError('print_file is abstract')
269
"""Return all unknown files.
271
These are files in the working directory that are not versioned or
272
control files or ignored.
274
>>> from bzrlib.workingtree import WorkingTree
275
>>> b = ScratchBranch(files=['foo', 'foo~'])
276
>>> map(str, b.unknowns())
279
>>> list(b.unknowns())
281
>>> WorkingTree(b.base, b).remove('foo')
282
>>> list(b.unknowns())
285
raise NotImplementedError('unknowns is abstract')
287
def append_revision(self, *revision_ids):
288
raise NotImplementedError('append_revision is abstract')
290
def set_revision_history(self, rev_history):
291
raise NotImplementedError('set_revision_history is abstract')
293
def has_revision(self, revision_id):
294
"""True if this branch has a copy of the revision.
296
This does not necessarily imply the revision is merge
297
or on the mainline."""
298
raise NotImplementedError('has_revision is abstract')
300
def get_revision_xml_file(self, revision_id):
301
"""Return XML file object for revision object."""
302
raise NotImplementedError('get_revision_xml_file is abstract')
304
def get_revision_xml(self, revision_id):
305
raise NotImplementedError('get_revision_xml is abstract')
307
def get_revision(self, revision_id):
308
"""Return the Revision object for a named revision"""
309
raise NotImplementedError('get_revision is abstract')
311
def get_revision_delta(self, revno):
312
"""Return the delta for one revision.
314
The delta is relative to its mainline predecessor, or the
315
empty tree for revision 1.
317
assert isinstance(revno, int)
318
rh = self.revision_history()
319
if not (1 <= revno <= len(rh)):
320
raise InvalidRevisionNumber(revno)
322
# revno is 1-based; list is 0-based
324
new_tree = self.revision_tree(rh[revno-1])
326
old_tree = EmptyTree()
328
old_tree = self.revision_tree(rh[revno-2])
330
return compare_trees(old_tree, new_tree)
332
def get_revision_sha1(self, revision_id):
333
"""Hash the stored value of a revision, and return it."""
334
raise NotImplementedError('get_revision_sha1 is abstract')
336
def get_ancestry(self, revision_id):
337
"""Return a list of revision-ids integrated by a revision.
339
This currently returns a list, but the ordering is not guaranteed:
342
raise NotImplementedError('get_ancestry is abstract')
344
def get_inventory(self, revision_id):
345
"""Get Inventory object by hash."""
346
raise NotImplementedError('get_inventory is abstract')
348
def get_inventory_xml(self, revision_id):
349
"""Get inventory XML as a file object."""
350
raise NotImplementedError('get_inventory_xml is abstract')
352
def get_inventory_sha1(self, revision_id):
353
"""Return the sha1 hash of the inventory entry."""
354
raise NotImplementedError('get_inventory_sha1 is abstract')
356
def get_revision_inventory(self, revision_id):
357
"""Return inventory of a past revision."""
358
raise NotImplementedError('get_revision_inventory is abstract')
360
def revision_history(self):
361
"""Return sequence of revision hashes on to this branch."""
362
raise NotImplementedError('revision_history is abstract')
966
365
"""Return current revision number for this branch.
1187
554
self.check_real_revno(revno)
1189
556
def check_real_revno(self, revno):
1191
558
Check whether a revno corresponds to a real revision.
1192
559
Zero (the NULL revision) is considered invalid
1194
561
if revno < 1 or revno > self.revno():
1195
raise errors.InvalidRevisionNumber(revno)
1197
def clone(self, to_controldir, revision_id=None, name=None,
1198
repository_policy=None, tag_selector=None):
1199
"""Clone this branch into to_controldir preserving all semantic values.
1201
Most API users will want 'create_clone_on_transport', which creates a
1202
new bzrdir and branch on the fly.
1204
revision_id: if not None, the revision history in the new branch will
1205
be truncated to end with revision_id.
1207
result = to_controldir.create_branch(name=name)
1208
with self.lock_read(), result.lock_write():
1209
if repository_policy is not None:
1210
repository_policy.configure_branch(result)
1211
self.copy_content_into(
1212
result, revision_id=revision_id, tag_selector=tag_selector)

    def sprout(self, to_controldir, revision_id=None, repository_policy=None,
               repository=None, lossy=False, tag_selector=None):
        """Create a new line of development from the branch, into to_controldir.

        to_controldir controls the branch format.

        revision_id: if not None, the revision history in the new branch will
            be truncated to end with revision_id.
        """
        if (repository_policy is not None
                and repository_policy.requires_stacking()):
            to_controldir._format.require_stacking(_skip_repo=True)
        result = to_controldir.create_branch(repository=repository)
        if lossy:
            raise errors.LossyPushToSameVCS(self, result)
        with self.lock_read(), result.lock_write():
            if repository_policy is not None:
                repository_policy.configure_branch(result)
            self.copy_content_into(
                result, revision_id=revision_id, tag_selector=tag_selector)
            master_url = self.get_bound_location()
            if master_url is None:
                result.set_parent(self.user_url)
            else:
                result.set_parent(master_url)
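
    # Illustrative sketch (assumption: 'to_controldir' was created by the
    # caller beforehand via the controldir APIs): starting a new line of
    # development capped at a chosen revision.
    #
    #   new_branch = branch.sprout(to_controldir,
    #                              revision_id=branch.last_revision())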
1242
def _synchronize_history(self, destination, revision_id):
1243
"""Synchronize last revision and revision history between branches.
1245
This version is most efficient when the destination is also a
1246
BzrBranch6, but works for BzrBranch5, as long as the destination's
1247
repository contains all the lefthand ancestors of the intended
1248
last_revision. If not, set_last_revision_info will fail.
1250
:param destination: The branch to copy the history into
1251
:param revision_id: The revision-id to truncate history at. May
1252
be None to copy complete history.
1254
source_revno, source_revision_id = self.last_revision_info()
1255
if revision_id is None:
1256
revno, revision_id = source_revno, source_revision_id
1258
graph = self.repository.get_graph()
1260
revno = graph.find_distance_to_null(
1261
revision_id, [(source_revision_id, source_revno)])
1262
except errors.GhostRevisionsHaveNoRevno:
1263
# Default to 1, if we can't find anything else
1265
destination.set_last_revision_info(revno, revision_id)
1267
def copy_content_into(self, destination, revision_id=None, tag_selector=None):
1268
"""Copy the content of self into destination.
1270
revision_id: if not None, the revision history in the new branch will
1271
be truncated to end with revision_id.
1272
tag_selector: Optional callback that receives a tag name
1273
and should return a boolean to indicate whether a tag should be copied
1275
return InterBranch.get(self, destination).copy_content_into(
1276
revision_id=revision_id, tag_selector=tag_selector)
1278
def update_references(self, target):
1279
if not self._format.supports_reference_locations:
562
raise InvalidRevisionNumber(revno)
564
def sign_revision(self, revision_id, gpg_strategy):
565
raise NotImplementedError('sign_revision is abstract')
567
def store_revision_signature(self, gpg_strategy, plaintext, revision_id):
568
raise NotImplementedError('store_revision_signature is abstract')
570
class BzrBranch(Branch):
571
"""A branch stored in the actual filesystem.
573
Note that it's "local" in the context of the filesystem; it doesn't
574
really matter if it's on an nfs/smb/afs/coda/... share, as long as
575
it's writable, and can be accessed via the normal filesystem API.
581
If _lock_mode is true, a positive count of the number of times the
585
Lock object from bzrlib.lock.
587
# We actually expect this class to be somewhat short-lived; part of its
588
# purpose is to try to isolate what bits of the branch logic are tied to
589
# filesystem access, so that in a later step, we can extricate them to
590
# a separate ("storage") class.
594
_inventory_weave = None
596
# Map some sort of prefix into a namespace
597
# stuff like "revno:10", "revid:", etc.
598
# This should match a prefix with a function which accepts
599
REVISION_NAMESPACES = {}
601
def push_stores(self, branch_to):
602
"""See Branch.push_stores."""
603
if (self._branch_format != branch_to._branch_format
604
or self._branch_format != 4):
605
from bzrlib.fetch import greedy_fetch
606
mutter("falling back to fetch logic to push between %s(%s) and %s(%s)",
607
self, self._branch_format, branch_to, branch_to._branch_format)
608
greedy_fetch(to_branch=branch_to, from_branch=self,
609
revision=self.last_revision())
1281
return InterBranch.get(self, target).update_references()
1283
def check(self, refs):
1284
"""Check consistency of the branch.
1286
In particular this checks that revisions given in the revision-history
1287
do actually match up in the revision graph, and that they're all
1288
present in the repository.
1290
Callers will typically also want to check the repository.
1292
:param refs: Calculated refs for this branch as specified by
1293
branch._get_check_refs()
1294
:return: A BranchCheckResult.
1296
with self.lock_read():
1297
result = BranchCheckResult(self)
1298
last_revno, last_revision_id = self.last_revision_info()
1299
actual_revno = refs[('lefthand-distance', last_revision_id)]
1300
if actual_revno != last_revno:
1301
result.errors.append(errors.BzrCheckError(
1302
'revno does not match len(mainline) %s != %s' % (
1303
last_revno, actual_revno)))
1304
# TODO: We should probably also check that self.revision_history
1305
# matches the repository for older branch formats.
1306
# If looking for the code that cross-checks repository parents
1307
# against the Graph.iter_lefthand_ancestry output, that is now a
1308
# repository specific check.
1311
def _get_checkout_format(self, lightweight=False):
1312
"""Return the most suitable metadir for a checkout of this branch.
1313
Weaves are used if this branch's repository uses weaves.
1315
format = self.repository.controldir.checkout_metadir()
1316
format.set_branch_format(self._format)

    def create_clone_on_transport(self, to_transport, revision_id=None,
                                  stacked_on=None, create_prefix=False,
                                  use_existing_dir=False, no_tree=None,
                                  tag_selector=None):
        """Create a clone of this branch and its bzrdir.

        :param to_transport: The transport to clone onto.
        :param revision_id: The revision id to use as tip in the new branch.
            If None the tip is obtained from this branch.
        :param stacked_on: An optional URL to stack the clone on.
        :param create_prefix: Create any missing directories leading up to
            to_transport.
        :param use_existing_dir: Use an existing directory if one exists.
        """
        # XXX: Fix the bzrdir API to allow getting the branch back from the
        # clone call. Or something. 20090224 RBC/spiv.
        # XXX: Should this perhaps clone colocated branches as well,
        # rather than just the default branch? 20100319 JRV
        if revision_id is None:
            revision_id = self.last_revision()
        dir_to = self.controldir.clone_on_transport(
            to_transport, revision_id=revision_id, stacked_on=stacked_on,
            create_prefix=create_prefix, use_existing_dir=use_existing_dir,
            no_tree=no_tree, tag_selector=tag_selector)
        return dir_to.open_branch()
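
    # Illustrative sketch (hypothetical URL): cloning this branch and its
    # bzrdir onto a transport, creating missing leading directories.
    #
    #   t = transport.get_transport('bzr+ssh://example.com/srv/project')
    #   clone = branch.create_clone_on_transport(t, create_prefix=True)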
1345
def create_checkout(self, to_location, revision_id=None,
1346
lightweight=False, accelerator_tree=None,
1347
hardlink=False, recurse_nested=True):
1348
"""Create a checkout of a branch.
1350
:param to_location: The url to produce the checkout at
1351
:param revision_id: The revision to check out
1352
:param lightweight: If True, produce a lightweight checkout, otherwise,
1353
produce a bound branch (heavyweight checkout)
1354
:param accelerator_tree: A tree which can be used for retrieving file
1355
contents more quickly than the revision tree, i.e. a workingtree.
1356
The revision tree will be used for cases where accelerator_tree's
1357
content is different.
1358
:param hardlink: If true, hard-link files from accelerator_tree,
1360
:param recurse_nested: Whether to recurse into nested trees
1361
:return: The tree of the created checkout
1363
t = transport.get_transport(to_location)
1365
format = self._get_checkout_format(lightweight=lightweight)
612
store_pairs = ((self.text_store, branch_to.text_store),
613
(self.inventory_store, branch_to.inventory_store),
614
(self.revision_store, branch_to.revision_store))
1367
checkout = format.initialize_on_transport(t)
1368
except errors.AlreadyControlDirError:
1369
# It's fine if the control directory already exists,
1370
# as long as there is no existing branch and working tree.
1371
checkout = controldir.ControlDir.open_from_transport(t)
616
for from_store, to_store in store_pairs:
617
copy_all(from_store, to_store)
618
except UnlistableStore:
619
raise UnlistableBranch(from_store)
621
def __init__(self, transport, init=False,
622
relax_version_check=False):
623
"""Create new branch object at a particular location.
625
transport -- A Transport object, defining how to access files.
627
init -- If True, create new control files in a previously
628
unversioned directory. If False, the branch must already
631
relax_version_check -- If true, the usual check for the branch
632
version is not applied. This is intended only for
633
upgrade/recovery type use; it's not guaranteed that
634
all operations will work on old format branches.
636
In the test suite, creation of new trees is tested using the
637
`ScratchBranch` class.
639
assert isinstance(transport, Transport), \
640
"%r is not a Transport" % transport
641
self._transport = transport
644
self._check_format(relax_version_check)
646
def get_store(name, compressed=True, prefixed=False):
647
# FIXME: This approach of assuming stores are all entirely compressed
648
# or entirely uncompressed is tidy, but breaks upgrade from
649
# some existing branches where there's a mixture; we probably
650
# still want the option to look for both.
651
relpath = self._rel_controlfilename(name)
652
store = TextStore(self._transport.clone(relpath),
654
compressed=compressed)
655
#if self._transport.should_cache():
656
# cache_path = os.path.join(self.cache_root, name)
657
# os.mkdir(cache_path)
658
# store = bzrlib.store.CachedStore(store, cache_path)
660
def get_weave(name, prefixed=False):
661
relpath = self._rel_controlfilename(name)
662
ws = WeaveStore(self._transport.clone(relpath), prefixed=prefixed)
663
if self._transport.should_cache():
664
ws.enable_cache = True
667
if self._branch_format == 4:
668
self.inventory_store = get_store('inventory-store')
669
self.text_store = get_store('text-store')
670
self.revision_store = get_store('revision-store')
671
elif self._branch_format == 5:
672
self.control_weaves = get_weave('')
673
self.weave_store = get_weave('weaves')
674
self.revision_store = get_store('revision-store', compressed=False)
675
elif self._branch_format == 6:
676
self.control_weaves = get_weave('')
677
self.weave_store = get_weave('weaves', prefixed=True)
678
self.revision_store = get_store('revision-store', compressed=False,
680
self.revision_store.register_suffix('sig')
681
self._transaction = None
684
return '%s(%r)' % (self.__class__.__name__, self._transport.base)
689
if self._lock_mode or self._lock:
690
# XXX: This should show something every time, and be suitable for
691
# headless operation and embedding
692
warn("branch %r was not explicitly unlocked" % self)
695
# TODO: It might be best to do this somewhere else,
696
# but it is nice for a Branch object to automatically
697
# cache its information.
698
# Alternatively, we could have the Transport objects cache requests
699
# See the earlier discussion about how major objects (like Branch)
700
# should never expect their __del__ function to run.
701
if hasattr(self, 'cache_root') and self.cache_root is not None:
1373
checkout.open_branch()
1374
except errors.NotBranchError:
703
shutil.rmtree(self.cache_root)
706
self.cache_root = None
710
return self._transport.base
713
base = property(_get_base, doc="The URL for the root of this branch.")
715
def _finish_transaction(self):
716
"""Exit the current transaction."""
717
if self._transaction is None:
718
raise errors.LockError('Branch %s is not in a transaction' %
720
transaction = self._transaction
721
self._transaction = None
724
def get_transaction(self):
725
"""See Branch.get_transaction."""
726
if self._transaction is None:
727
return transactions.PassThroughTransaction()
729
return self._transaction
731
def _set_transaction(self, new_transaction):
732
"""Set a new active transaction."""
733
if self._transaction is not None:
734
raise errors.LockError('Branch %s is in a transaction already.' %
736
self._transaction = new_transaction
738
def lock_write(self):
739
mutter("lock write: %s (%s)", self, self._lock_count)
740
# TODO: Upgrade locking to support using a Transport,
741
# and potentially a remote locking protocol
743
if self._lock_mode != 'w':
744
raise LockError("can't upgrade to a write lock from %r" %
746
self._lock_count += 1
748
self._lock = self._transport.lock_write(
749
self._rel_controlfilename('branch-lock'))
750
self._lock_mode = 'w'
752
self._set_transaction(transactions.PassThroughTransaction())
755
mutter("lock read: %s (%s)", self, self._lock_count)
757
assert self._lock_mode in ('r', 'w'), \
758
"invalid lock mode %r" % self._lock_mode
759
self._lock_count += 1
761
self._lock = self._transport.lock_read(
762
self._rel_controlfilename('branch-lock'))
763
self._lock_mode = 'r'
765
self._set_transaction(transactions.ReadOnlyTransaction())
766
# 5K may be excessive, but hey, its a knob.
767
self.get_transaction().set_cache_size(5000)
770
mutter("unlock: %s (%s)", self, self._lock_count)
771
if not self._lock_mode:
772
raise LockError('branch %r is not locked' % (self))
774
if self._lock_count > 1:
775
self._lock_count -= 1
777
self._finish_transaction()
780
self._lock_mode = self._lock_count = None
782
def abspath(self, name):
783
"""See Branch.abspath."""
784
return self._transport.abspath(name)
786
def _rel_controlfilename(self, file_or_path):
787
if not isinstance(file_or_path, basestring):
788
file_or_path = '/'.join(file_or_path)
789
if file_or_path == '':
791
return bzrlib.transport.urlescape(bzrlib.BZRDIR + '/' + file_or_path)
793
def controlfilename(self, file_or_path):
794
"""See Branch.controlfilename."""
795
return self._transport.abspath(self._rel_controlfilename(file_or_path))
797
def controlfile(self, file_or_path, mode='r'):
798
"""See Branch.controlfile."""
801
relpath = self._rel_controlfilename(file_or_path)
802
#TODO: codecs.open() buffers linewise, so it was overloaded with
803
# a much larger buffer, do we need to do the same for getreader/getwriter?
805
return self._transport.get(relpath)
807
raise BzrError("Branch.controlfile(mode='wb') is not supported, use put_controlfiles")
809
# XXX: Do we really want errors='replace'? Perhaps it should be
810
# an error, or at least reported, if there's incorrectly-encoded
811
# data inside a file.
812
# <https://launchpad.net/products/bzr/+bug/3823>
813
return codecs.getreader('utf-8')(self._transport.get(relpath), errors='replace')
815
raise BzrError("Branch.controlfile(mode='w') is not supported, use put_controlfiles")
817
raise BzrError("invalid controlfile mode %r" % mode)
819
def put_controlfile(self, path, f, encode=True):
820
"""See Branch.put_controlfile."""
821
self.put_controlfiles([(path, f)], encode=encode)
823
def put_controlfiles(self, files, encode=True):
824
"""See Branch.put_controlfiles."""
827
for path, f in files:
829
if isinstance(f, basestring):
830
f = f.encode('utf-8', 'replace')
832
f = codecs.getwriter('utf-8')(f, errors='replace')
833
path = self._rel_controlfilename(path)
834
ctrl_files.append((path, f))
835
self._transport.put_multi(ctrl_files)
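# Hedged example for put_controlfiles(), assuming `branch` is writable and
# locked: strings are UTF-8 encoded before being handed to the transport,
# file-like objects are wrapped in a UTF-8 writer unless encode=False.
#
#   branch.put_controlfiles([('parent', 'http://example.com/branch\n'),
#                            ('pending-merges', '')])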
837
def _make_control(self):
838
from bzrlib.inventory import Inventory
839
from bzrlib.weavefile import write_weave_v5
840
from bzrlib.weave import Weave
842
# Create an empty inventory
844
# if we want per-tree root ids then this is the place to set
845
# them; they're not needed for now and so omitted for
847
bzrlib.xml5.serializer_v5.write_inventory(Inventory(), sio)
848
empty_inv = sio.getvalue()
850
bzrlib.weavefile.write_weave_v5(Weave(), sio)
851
empty_weave = sio.getvalue()
853
dirs = [[], 'revision-store', 'weaves']
855
"This is a Bazaar-NG control directory.\n"
856
"Do not change any files in this directory.\n"),
857
('branch-format', BZR_BRANCH_FORMAT_6),
858
('revision-history', ''),
861
('pending-merges', ''),
862
('inventory', empty_inv),
863
('inventory.weave', empty_weave),
864
('ancestry.weave', empty_weave)
866
cfn = self._rel_controlfilename
867
self._transport.mkdir_multi([cfn(d) for d in dirs])
868
self.put_controlfiles(files)
869
mutter('created control directory in ' + self._transport.base)
871
def _check_format(self, relax_version_check):
872
"""Check this branch format is supported.
874
The format level is stored, as an integer, in
875
self._branch_format for code that needs to check it later.
877
In the future, we might need different in-memory Branch
878
classes to support downlevel branches. But not yet.
881
fmt = self.controlfile('branch-format', 'r').read()
883
raise NotBranchError(path=self.base)
884
mutter("got branch format %r", fmt)
885
if fmt == BZR_BRANCH_FORMAT_6:
886
self._branch_format = 6
887
elif fmt == BZR_BRANCH_FORMAT_5:
888
self._branch_format = 5
889
elif fmt == BZR_BRANCH_FORMAT_4:
890
self._branch_format = 4
892
if (not relax_version_check
893
and self._branch_format not in (5, 6)):
894
raise errors.UnsupportedFormatError(
895
'sorry, branch format %r not supported' % fmt,
896
['use a different bzr version',
897
'or remove the .bzr directory'
898
' and "bzr init" again'])
900
def get_root_id(self):
901
"""See Branch.get_root_id."""
902
inv = self.get_inventory(self.last_revision())
903
return inv.root.file_id
906
def set_root_id(self, file_id):
907
"""See Branch.set_root_id."""
908
inv = self.working_tree().read_working_inventory()
909
orig_root_id = inv.root.file_id
910
del inv._byid[inv.root.file_id]
911
inv.root.file_id = file_id
912
inv._byid[inv.root.file_id] = inv.root
915
if entry.parent_id in (None, orig_root_id):
916
entry.parent_id = inv.root.file_id
917
self._write_inventory(inv)
920
def add(self, files, ids=None):
921
"""See Branch.add."""
922
# TODO: Re-adding a file that is removed in the working copy
923
# should probably put it back with the previous ID.
924
if isinstance(files, basestring):
925
assert(ids is None or isinstance(ids, basestring))
931
ids = [None] * len(files)
933
assert(len(ids) == len(files))
935
inv = self.working_tree().read_working_inventory()
936
for f,file_id in zip(files, ids):
937
if is_control_file(f):
938
raise BzrError("cannot add control file %s" % quotefn(f))
943
raise BzrError("cannot add top-level %r" % f)
945
fullpath = os.path.normpath(self.abspath(f))
948
kind = file_kind(fullpath)
950
# maybe something better?
951
raise BzrError('cannot add: not a regular file, symlink or directory: %s' % quotefn(f))
953
if not InventoryEntry.versionable_kind(kind):
954
raise BzrError('cannot add: not a versionable file ('
955
'i.e. regular file, symlink or directory): %s' % quotefn(f))
958
file_id = gen_file_id(f)
959
inv.add_path(f, kind=kind, file_id=file_id)
961
mutter("add file %s file_id:{%s} kind=%r" % (f, file_id, kind))
963
self.working_tree()._write_inventory(inv)
966
def print_file(self, file, revno):
967
"""See Branch.print_file."""
968
tree = self.revision_tree(self.get_rev_id(revno))
969
# use inventory as it was in that revision
970
file_id = tree.inventory.path2id(file)
972
raise BzrError("%r is not present in revision %s" % (file, revno))
973
tree.print_file(file_id)
976
"""See Branch.unknowns."""
977
return self.working_tree().unknowns()
980
def append_revision(self, *revision_ids):
981
"""See Branch.append_revision."""
982
for revision_id in revision_ids:
983
mutter("add {%s} to revision-history" % revision_id)
984
rev_history = self.revision_history()
985
rev_history.extend(revision_ids)
986
self.set_revision_history(rev_history)
989
def set_revision_history(self, rev_history):
990
"""See Branch.set_revision_history."""
991
bound_loc = self.get_bound_location()
992
if bound_loc is not None:
993
# TODO: At this point, we could get a NotBranchError
994
# because we can't connect to the remote location.
995
# How do we distinguish this from a remote branch
996
# which has been deleted?
998
rev_history = self._update_remote_location(bound_loc,
1000
except DivergedBranches:
1001
raise errors.CannotInstallRevisions('Remote tree has commits.'
1002
' Use bzr update to come up to date')
1003
self.put_controlfile('revision-history', '\n'.join(rev_history))
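# Illustrative sketch (revision ids are made up): append_revision() extends
# the cached history and rewrites the 'revision-history' control file via
# set_revision_history(), updating the bound location first if one is set.
#
#   branch.append_revision('rev-id-1', 'rev-id-2')
#   assert branch.revision_history()[-2:] == ['rev-id-1', 'rev-id-2']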
1005
def has_revision(self, revision_id):
1006
"""See Branch.has_revision."""
1007
return (revision_id is None
1008
or self.revision_store.has_id(revision_id))
1011
def get_revision_xml_file(self, revision_id):
1012
"""See Branch.get_revision_xml_file."""
1013
if not revision_id or not isinstance(revision_id, basestring):
1014
raise InvalidRevisionId(revision_id=revision_id, branch=self)
1016
return self.revision_store.get(revision_id)
1017
except (IndexError, KeyError):
1018
raise bzrlib.errors.NoSuchRevision(self, revision_id)
1021
get_revision_xml = get_revision_xml_file
1023
def get_revision_xml(self, revision_id):
1024
"""See Branch.get_revision_xml."""
1025
return self.get_revision_xml_file(revision_id).read()
1028
def get_revision(self, revision_id):
1029
"""See Branch.get_revision."""
1030
xml_file = self.get_revision_xml_file(revision_id)
1033
r = bzrlib.xml5.serializer_v5.read_revision(xml_file)
1034
except SyntaxError, e:
1035
raise bzrlib.errors.BzrError('failed to unpack revision_xml',
1039
assert r.revision_id == revision_id
1042
def get_revision_sha1(self, revision_id):
1043
"""See Branch.get_revision_sha1."""
1044
# In the future, revision entries will be signed. At that
1045
# point, it is probably best *not* to include the signature
1046
# in the revision hash. Because that lets you re-sign
1047
# the revision, (add signatures/remove signatures) and still
1048
# have all hash pointers stay consistent.
1049
# But for now, just hash the contents.
1050
return bzrlib.osutils.sha_file(self.get_revision_xml_file(revision_id))
1052
def get_ancestry(self, revision_id):
1053
"""See Branch.get_ancestry."""
1054
if revision_id is None:
1056
w = self._get_inventory_weave()
1057
return [None] + map(w.idx_to_name,
1058
w.inclusions([w.lookup(revision_id)]))
1060
def _get_inventory_weave(self):
1061
return self.control_weaves.get_weave('inventory',
1062
self.get_transaction())
1064
def get_inventory(self, revision_id):
1065
"""See Branch.get_inventory."""
1066
xml = self.get_inventory_xml(revision_id)
1067
return bzrlib.xml5.serializer_v5.read_inventory_from_string(xml)
1069
def get_inventory_xml(self, revision_id):
1070
"""See Branch.get_inventory_xml."""
1072
assert isinstance(revision_id, basestring), type(revision_id)
1073
iw = self._get_inventory_weave()
1074
return iw.get_text(iw.lookup(revision_id))
1076
raise bzrlib.errors.HistoryMissing(self, 'inventory', revision_id)
1078
def get_inventory_sha1(self, revision_id):
1079
"""See Branch.get_inventory_sha1."""
1080
return self.get_revision(revision_id).inventory_sha1
1082
def get_revision_inventory(self, revision_id):
1083
"""See Branch.get_revision_inventory."""
1084
# TODO: Unify this with get_inventory()
1085
# bzr 0.0.6 and later imposes the constraint that the inventory_id
1086
# must be the same as its revision, so this is trivial.
1087
if revision_id == None:
1088
# This does not make sense: if there is no revision,
1089
# then it is the current tree inventory surely ?!
1090
# and thus get_root_id() is something that looks at the last
1091
# commit on the branch, and the get_root_id is an inventory check.
1092
raise NotImplementedError
1093
# return Inventory(self.get_root_id())
1095
return self.get_inventory(revision_id)
1098
def revision_history(self):
1099
"""See Branch.revision_history."""
1100
transaction = self.get_transaction()
1101
history = transaction.map.find_revision_history()
1102
if history is not None:
1103
mutter("cache hit for revision-history in %s", self)
1104
return list(history)
1105
history = [l.rstrip('\r\n') for l in
1106
self.controlfile('revision-history', 'r').readlines()]
1107
transaction.map.add_revision_history(history)
1108
# this call is disabled because revision_history is
1109
# not really an object yet, and the transaction is for objects.
1110
# transaction.register_clean(history, precious=True)
1111
return list(history)
1113
def update_revisions(self, other, stop_revision=None, other_history=None):
1114
"""See Branch.update_revisions."""
1115
from bzrlib.fetch import greedy_fetch
1116
if stop_revision is None:
1117
if other_history is not None:
1118
stop_revision = other_history[-1]
1377
raise errors.AlreadyControlDirError(t.base)
1378
if (checkout.control_transport.base
1379
== self.controldir.control_transport.base):
1380
# When checking out to the same control directory,
1381
# always create a lightweight checkout
1385
from_branch = checkout.set_branch_reference(target_branch=self)
1387
policy = checkout.determine_repository_policy()
1388
policy.acquire_repository()
1389
checkout_branch = checkout.create_branch()
1390
checkout_branch.bind(self)
1391
# pull up to the specified revision_id to set the initial
1392
# branch tip correctly, and seed it with history.
1393
checkout_branch.pull(self, stop_revision=revision_id)
1395
tree = checkout.create_workingtree(revision_id,
1396
from_branch=from_branch,
1397
accelerator_tree=accelerator_tree,
1399
basis_tree = tree.basis_tree()
1400
with basis_tree.lock_read():
1401
for path in basis_tree.iter_references():
1402
reference_parent = tree.reference_parent(path)
1403
if reference_parent is None:
1404
warning('Branch location for %s unknown.', path)
1406
reference_parent.create_checkout(
1408
basis_tree.get_reference_revision(path), lightweight)
1411
def reconcile(self, thorough=True):
1412
"""Make sure the data stored in this branch is consistent.
1414
:return: A `ReconcileResult` object.
1416
raise NotImplementedError(self.reconcile)
1418
def supports_tags(self):
1419
return self._format.supports_tags()
1421
def automatic_tag_name(self, revision_id):
1422
"""Try to automatically find the tag name for a revision.
1424
:param revision_id: Revision id of the revision.
1425
:return: A tag name or None if no tag name could be determined.
1427
for hook in Branch.hooks['automatic_tag_name']:
1428
ret = hook(self, revision_id)
1120
stop_revision = other.last_revision()
1121
### Should this be checking is_ancestor instead of revision_history?
1122
if (stop_revision is not None and
1123
stop_revision in self.revision_history()):
1125
greedy_fetch(to_branch=self, from_branch=other,
1126
revision=stop_revision)
1127
pullable_revs = self.pullable_revisions(other, stop_revision,
1128
other_history=other_history)
1129
if len(pullable_revs) > 0:
1130
self.append_revision(*pullable_revs)
1132
def pullable_revisions(self, other, stop_revision, other_history=None):
1133
if other_history is not None:
1135
other_revno = other_history.index(stop_revision) + 1
1137
raise errors.NoSuchRevision(self, stop_revision)
1139
other_revno = other.revision_id_to_revno(stop_revision)
1141
return self.missing_revisions(other, other_revno,
1142
other_history=other_history)
1143
except DivergedBranches, e:
1145
pullable_revs = get_intervening_revisions(self.last_revision(),
1146
stop_revision, self)
1147
assert self.last_revision() not in pullable_revs
1148
return pullable_revs
1149
except bzrlib.errors.NotAncestor:
1150
if is_ancestor(self.last_revision(), stop_revision, self):
1155
def revision_id_to_revno(self, revision_id):
1156
"""Given a revision id, return its revno"""
1157
if revision_id is None:
1159
history = self.revision_history()
1161
return history.index(revision_id) + 1
1163
raise bzrlib.errors.NoSuchRevision(self, revision_id)
1165
def get_rev_id(self, revno, history=None):
1166
"""Find the revision id of the specified revno."""
1170
history = self.revision_history()
1171
elif revno <= 0 or revno > len(history):
1172
raise bzrlib.errors.NoSuchRevision(self, revno)
1173
return history[revno - 1]
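# Round-trip sketch between revnos and revision ids (illustrative; assumes
# at least one commit): revnos are 1-based indexes into revision_history(),
# with revno 0 standing for the null revision.
#
#   rev_id = branch.get_rev_id(branch.revno())
#   assert branch.revision_id_to_revno(rev_id) == branch.revno()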
1175
def revision_tree(self, revision_id):
1176
"""See Branch.revision_tree."""
1177
# TODO: refactor this to use an existing revision object
1178
# so we don't need to read it in twice.
1179
if revision_id == None or revision_id == NULL_REVISION:
1182
inv = self.get_revision_inventory(revision_id)
1183
return RevisionTree(self.weave_store, inv, revision_id)
1185
def working_tree(self):
1186
"""See Branch.working_tree."""
1187
from bzrlib.workingtree import WorkingTree
1188
# TODO: In the future, perhaps WorkingTree should utilize Transport
1189
# RobertCollins 20051003 - I don't think it should - working trees are
1190
# much more complex to keep consistent than our careful .bzr subset.
1191
# instead, we should say that working trees are local only, and optimise
1193
if self._transport.base.find('://') != -1:
1194
raise NoWorkingTree(self.base)
1195
return WorkingTree(self.base, branch=self)
1198
def pull(self, source, overwrite=False):
1199
"""See Branch.pull."""
1203
self.update_revisions(source)
1204
except DivergedBranches:
1207
self.set_revision_history(source.revision_history())
1212
def rename_one(self, from_rel, to_rel):
1213
"""See Branch.rename_one."""
1214
tree = self.working_tree()
1215
inv = tree.inventory
1216
if not tree.has_filename(from_rel):
1217
raise BzrError("can't rename: old working file %r does not exist" % from_rel)
1218
if tree.has_filename(to_rel):
1219
raise BzrError("can't rename: new working file %r already exists" % to_rel)
1221
file_id = inv.path2id(from_rel)
1223
raise BzrError("can't rename: old name %r is not versioned" % from_rel)
1225
if inv.path2id(to_rel):
1226
raise BzrError("can't rename: new name %r is already versioned" % to_rel)
1228
to_dir, to_tail = os.path.split(to_rel)
1229
to_dir_id = inv.path2id(to_dir)
1230
if to_dir_id == None and to_dir != '':
1231
raise BzrError("can't determine destination directory id for %r" % to_dir)
1233
mutter("rename_one:")
1234
mutter(" file_id {%s}" % file_id)
1235
mutter(" from_rel %r" % from_rel)
1236
mutter(" to_rel %r" % to_rel)
1237
mutter(" to_dir %r" % to_dir)
1238
mutter(" to_dir_id {%s}" % to_dir_id)
1240
inv.rename(file_id, to_dir_id, to_tail)
1242
from_abs = self.abspath(from_rel)
1243
to_abs = self.abspath(to_rel)
1245
rename(from_abs, to_abs)
1247
raise BzrError("failed to rename %r to %r: %s"
1248
% (from_abs, to_abs, e[1]),
1249
["rename rolled back"])
1251
self.working_tree()._write_inventory(inv)
1254
def move(self, from_paths, to_name):
1255
"""See Branch.move."""
1257
## TODO: Option to move IDs only
1258
assert not isinstance(from_paths, basestring)
1259
tree = self.working_tree()
1260
inv = tree.inventory
1261
to_abs = self.abspath(to_name)
1262
if not isdir(to_abs):
1263
raise BzrError("destination %r is not a directory" % to_abs)
1264
if not tree.has_filename(to_name):
1265
raise BzrError("destination %r not in working directory" % to_abs)
1266
to_dir_id = inv.path2id(to_name)
1267
if to_dir_id == None and to_name != '':
1268
raise BzrError("destination %r is not a versioned directory" % to_name)
1269
to_dir_ie = inv[to_dir_id]
1270
if to_dir_ie.kind not in ('directory', 'root_directory'):
1271
raise BzrError("destination %r is not a directory" % to_abs)
1273
to_idpath = inv.get_idpath(to_dir_id)
1275
for f in from_paths:
1276
if not tree.has_filename(f):
1277
raise BzrError("%r does not exist in working tree" % f)
1278
f_id = inv.path2id(f)
1280
raise BzrError("%r is not versioned" % f)
1281
name_tail = splitpath(f)[-1]
1282
dest_path = appendpath(to_name, name_tail)
1283
if tree.has_filename(dest_path):
1284
raise BzrError("destination %r already exists" % dest_path)
1285
if f_id in to_idpath:
1286
raise BzrError("can't move %r to a subdirectory of itself" % f)
1288
# OK, so there's a race here, it's possible that someone will
1289
# create a file in this interval and then the rename might be
1290
# left half-done. But we should have caught most problems.
1292
for f in from_paths:
1293
name_tail = splitpath(f)[-1]
1294
dest_path = appendpath(to_name, name_tail)
1295
result.append((f, dest_path))
1296
inv.rename(inv.path2id(f), to_dir_id, name_tail)
1298
rename(self.abspath(f), self.abspath(dest_path))
1300
raise BzrError("failed to rename %r to %r: %s" % (f, dest_path, e[1]),
1301
["rename rolled back"])
1303
self.working_tree()._write_inventory(inv)
1306
def get_parent(self):
1307
"""See Branch.get_parent."""
1309
_locs = ['parent', 'pull', 'x-pull']
1312
return self.controlfile(l, 'r').read().strip('\n')
1314
if e.errno != errno.ENOENT:
1433
def _check_if_descendant_or_diverged(self, revision_a, revision_b, graph,
1435
"""Ensure that revision_b is a descendant of revision_a.
1437
This is a helper function for update_revisions.
1439
:raises: DivergedBranches if revision_b has diverged from revision_a.
1440
:returns: True if revision_b is a descendant of revision_a.
1442
relation = self._revision_relations(revision_a, revision_b, graph)
1443
if relation == 'b_descends_from_a':
1318
def get_push_location(self):
1319
"""See Branch.get_push_location."""
1320
config = bzrlib.config.BranchConfig(self)
1321
push_loc = config.get_user_option('push_location')
1324
def set_push_location(self, location):
1325
"""See Branch.set_push_location."""
1326
config = bzrlib.config.LocationConfig(self.base)
1327
config.set_user_option('push_location', location)
1330
def set_parent(self, url):
1331
"""See Branch.set_parent."""
1332
# TODO: Maybe delete old location files?
1333
from bzrlib.atomicfile import AtomicFile
1334
f = AtomicFile(self.controlfilename('parent'))
1341
def tree_config(self):
1342
return TreeConfig(self)
1344
def check_revno(self, revno):
1346
Check whether a revno corresponds to any revision.
1347
Zero (the NULL revision) is considered valid.
1350
self.check_real_revno(revno)
1352
def check_real_revno(self, revno):
1354
Check whether a revno corresponds to a real revision.
1355
Zero (the NULL revision) is considered invalid.
1357
if revno < 1 or revno > self.revno():
1358
raise InvalidRevisionNumber(revno)
1360
def sign_revision(self, revision_id, gpg_strategy):
1361
"""See Branch.sign_revision."""
1362
plaintext = Testament.from_revision(self, revision_id).as_short_text()
1363
self.store_revision_signature(gpg_strategy, plaintext, revision_id)
1366
def store_revision_signature(self, gpg_strategy, plaintext, revision_id):
1367
"""See Branch.store_revision_signature."""
1368
self.revision_store.add(StringIO(gpg_strategy.sign(plaintext)),
1371
# Do we want a read lock?
1372
def get_bound_location(self):
1373
bound_path = self._rel_controlfilename('bound')
1375
f = self._transport.get(bound_path)
1379
return f.read().strip()
1382
def set_bound_location(self, location):
1383
self.put_controlfile('bound', location+'\n')
1386
def bind(self, other):
1387
"""Bind the local branch the other branch.
1389
:param other: The branch to bind to
1392
# It is debatable whether you should be able to bind to
1393
# a branch which is itself bound.
1394
# Committing is obviously forbidden, but binding itself may not be.
1395
#if other.is_bound():
1396
# raise errors.CannotBind(msg='branch %s is bound' % (other.base))
1398
self.working_tree().pull(other)
1399
except NoWorkingTree:
1402
# Since we have 'pulled' from the remote location,
1403
# now we should try to pull in the opposite direction
1404
# in case the local tree has more revisions than the
1406
# There may be a different check you could do here
1407
# rather than actually trying to install revisions remotely.
1408
# TODO: capture an exception which indicates the remote branch
1410
# If it is up-to-date, this probably should not be a failure
1412
other.working_tree().pull(self)
1413
except NoWorkingTree:
1416
# Make sure the revision histories are now identical
1417
other_rh = other.revision_history()
1418
self.set_revision_history(other_rh)
1420
# Both branches should now be at the same revision
1421
self.set_bound_location(other.base)
1425
"""If bound, unbind"""
1426
bound_path = self._rel_controlfilename('bound')
1428
self._transport.delete(bound_path)
1433
def _update_remote_location(self, other_loc, revision_history):
1434
"""Make sure the remote location has the local changes.
1436
:param other_loc: Path to the other location
1437
:param revision_history: Total history to be updated
1438
:return: The remote revision_history
1440
from bzrlib.fetch import greedy_fetch
1441
mutter('_update_remote_location: %r, %r', other_loc, revision_history)
1442
other = Branch.open(other_loc)
1443
bound_loc = other.get_bound_location()
1444
if bound_loc is not None:
1445
raise errors.CannotInstallRevisions('Remote tree is bound')
1448
# update_revisions should also append to the revision history.
1449
other.update_revisions(self, other_history=revision_history)
1450
return other.revision_history()
1455
class ScratchBranch(BzrBranch):
1456
"""Special test class: a branch that cleans up after itself.
1458
>>> b = ScratchBranch()
1462
>>> b._transport.__del__()
1467
def __init__(self, files=[], dirs=[], transport=None):
1468
"""Make a test branch.
1470
This creates a temporary directory and runs init-tree in it.
1472
If any files are listed, they are created in the working copy.
1474
if transport is None:
1475
transport = bzrlib.transport.local.ScratchTransport()
1476
super(ScratchBranch, self).__init__(transport, init=True)
1478
super(ScratchBranch, self).__init__(transport)
1481
self._transport.mkdir(d)
1484
self._transport.put(f, 'content of %s' % f)
1489
>>> orig = ScratchBranch(files=["file1", "file2"])
1490
>>> clone = orig.clone()
1491
>>> if os.name != 'nt':
1492
... os.path.samefile(orig.base, clone.base)
1494
... orig.base == clone.base
1497
>>> os.path.isfile(os.path.join(clone.base, "file1"))
1500
from shutil import copytree
1501
from tempfile import mkdtemp
1504
copytree(self.base, base, symlinks=True)
1505
return ScratchBranch(
1506
transport=bzrlib.transport.local.ScratchTransport(base))
1509
######################################################################
1513
def is_control_file(filename):
1514
## FIXME: better check
1515
filename = os.path.normpath(filename)
1516
while filename != '':
1517
head, tail = os.path.split(filename)
1518
## mutter('check %r for control file' % ((head, tail), ))
1519
if tail == bzrlib.BZRDIR:
1445
elif relation == 'diverged':
1446
raise errors.DivergedBranches(self, other_branch)
1447
elif relation == 'a_descends_from_b':
1450
raise AssertionError("invalid relation: %r" % (relation,))
1452
def _revision_relations(self, revision_a, revision_b, graph):
1453
"""Determine the relationship between two revisions.
1455
:returns: One of: 'a_descends_from_b', 'b_descends_from_a', 'diverged'
1457
heads = graph.heads([revision_a, revision_b])
1458
if heads == {revision_b}:
1459
return 'b_descends_from_a'
1460
elif heads == {revision_a, revision_b}:
1461
# These branches have diverged
1463
elif heads == {revision_a}:
1464
return 'a_descends_from_b'
1466
raise AssertionError("invalid heads: %r" % (heads,))
1468
def heads_to_fetch(self):
1469
"""Return the heads that must and that should be fetched to copy this
1470
branch into another repo.
1472
:returns: a 2-tuple of (must_fetch, if_present_fetch). must_fetch is a
1473
set of heads that must be fetched. if_present_fetch is a set of
1474
heads that must be fetched if present, but no error is necessary if
1475
they are not present.
1477
# For bzr native formats must_fetch is just the tip, and
1478
# if_present_fetch are the tags.
1479
must_fetch = {self.last_revision()}
1480
if_present_fetch = set()
1481
if self.get_config_stack().get('branch.fetch_tags'):
1483
if_present_fetch = set(self.tags.get_reverse_tag_dict())
1484
except errors.TagsNotSupported:
1486
must_fetch.discard(_mod_revision.NULL_REVISION)
1487
if_present_fetch.discard(_mod_revision.NULL_REVISION)
1488
return must_fetch, if_present_fetch
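# Hedged usage sketch for heads_to_fetch() (the consuming fetch code below
# is an assumption, not this module's API): must_fetch is mandatory,
# if_present_fetch is best-effort.
#
#   must_fetch, if_present_fetch = branch.heads_to_fetch()
#   wanted = set(must_fetch)
#   wanted.update(r for r in if_present_fetch
#                 if branch.repository.has_revision(r))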
1490
def create_memorytree(self):
1491
"""Create a memory tree for this branch.
1493
:return: An in-memory MutableTree instance
1495
return memorytree.MemoryTree.create_on_branch(self)
1498
class BranchFormat(controldir.ControlComponentFormat):
1499
"""An encapsulation of the initialization and open routines for a format.
1501
Formats provide three things:
1502
* An initialization routine,
1503
* a format description
1506
Formats are placed in a dict by their format string for reference
1507
during branch opening. It's not required that these be instances, they
1508
can be classes themselves with class methods - it simply depends on
1509
whether state is needed for a given format or not.
1511
Once a format is deprecated, just deprecate the initialize and open
1512
methods on the format class. Do not deprecate the object, as the
1513
object will be created every time regardless.
1516
def __eq__(self, other):
1517
return self.__class__ is other.__class__
1519
def __ne__(self, other):
1520
return not (self == other)
1522
def get_reference(self, controldir, name=None):
1523
"""Get the target reference of the branch in controldir.
1525
format probing must have been completed before calling
1526
this method - it is assumed that the format of the branch
1527
in controldir is correct.
1529
:param controldir: The controldir to get the branch data from.
1530
:param name: Name of the colocated branch to fetch
1531
:return: None if the branch is not a reference branch.
1536
def set_reference(self, controldir, name, to_branch):
1537
"""Set the target reference of the branch in controldir.
1539
format probing must have been completed before calling
1540
this method - it is assumed that the format of the branch
1541
in controldir is correct.
1543
:param controldir: The controldir to set the branch reference for.
1544
:param name: Name of colocated branch to set, None for default
1545
:param to_branch: branch that the checkout is to reference
1547
raise NotImplementedError(self.set_reference)
1549
def get_format_description(self):
1550
"""Return the short format description for this format."""
1551
raise NotImplementedError(self.get_format_description)
1553
def _run_post_branch_init_hooks(self, controldir, name, branch):
1554
hooks = Branch.hooks['post_branch_init']
1557
params = BranchInitHookParams(self, controldir, name, branch)
1561
def initialize(self, controldir, name=None, repository=None,
1562
append_revisions_only=None):
1563
"""Create a branch of this format in controldir.
1565
:param name: Name of the colocated branch to create.
1567
raise NotImplementedError(self.initialize)
1569
def is_supported(self):
1570
"""Is this format supported?
1572
Supported formats can be initialized and opened.
1573
Unsupported formats may not support initialization or committing or
1574
some other features depending on the reason for not being supported.
1578
def make_tags(self, branch):
1579
"""Create a tags object for branch.
1581
This method is on BranchFormat, because BranchFormats are reflected
1582
over the wire via network_name(), whereas full Branch instances require
1583
multiple VFS method calls to operate at all.
1585
The default implementation returns a disabled-tags instance.
1587
Note that it is normal for branch to be a RemoteBranch when using tags
1590
return _mod_tag.DisabledTags(branch)
1592
def network_name(self):
1593
"""A simple byte string uniquely identifying this format for RPC calls.
1595
MetaDir branch formats use their disk format string to identify the
1596
repository over the wire. All in one formats such as bzr < 0.8, and
1597
foreign formats like svn/git and hg should use some marker which is
1598
unique and immutable.
1600
raise NotImplementedError(self.network_name)
1602
def open(self, controldir, name=None, _found=False, ignore_fallbacks=False,
1603
found_repository=None, possible_transports=None):
1604
"""Return the branch object for controldir.
1606
:param controldir: A ControlDir that contains a branch.
1607
:param name: Name of colocated branch to open
1608
:param _found: a private parameter, do not use it. It is used to
1609
indicate if format probing has already been done.
1610
:param ignore_fallbacks: when set, no fallback branches will be opened
1611
(if there are any). Default is to open fallbacks.
1613
raise NotImplementedError(self.open)
1615
def supports_set_append_revisions_only(self):
1616
"""True if this format supports set_append_revisions_only."""
1619
def supports_stacking(self):
1620
"""True if this format records a stacked-on branch."""
1623
def supports_leaving_lock(self):
1624
"""True if this format supports leaving locks in place."""
1625
return False # by default
1628
return self.get_format_description().rstrip()
1630
def supports_tags(self):
1631
"""True if this format supports tags stored in the branch"""
1632
return False # by default
1634
def tags_are_versioned(self):
1635
"""Whether the tag container for this branch versions tags."""
1638
def supports_tags_referencing_ghosts(self):
1639
"""True if tags can reference ghost revisions."""
1642
def supports_store_uncommitted(self):
1643
"""True if uncommitted changes can be stored in this branch."""
1646
def stores_revno(self):
1647
"""True if this branch format store revision numbers."""
1651
class BranchHooks(Hooks):
1652
"""A dictionary mapping hook name to a list of callables for branch hooks.
1654
e.g. ['post_push'] is the list of items to be called when the
1655
push function is invoked.
1659
"""Create the default hooks.
1661
These are all empty initially, because by default nothing should get
1664
Hooks.__init__(self, "breezy.branch", "Branch.hooks")
1667
"Called with the Branch object that has been opened after a "
1668
"branch is opened.", (1, 8))
1671
"Called after a push operation completes. post_push is called "
1672
"with a breezy.branch.BranchPushResult object and only runs in "
1673
"the bzr client.", (0, 15))
1676
"Called after a pull operation completes. post_pull is called "
1677
"with a breezy.branch.PullResult object and only runs in the "
1678
"bzr client.", (0, 15))
1681
"Called after a commit is calculated but before it is "
1682
"completed. pre_commit is called with (local, master, old_revno, "
1683
"old_revid, future_revno, future_revid, tree_delta, future_tree"
1684
"). old_revid is NULL_REVISION for the first commit to a branch, "
1685
"tree_delta is a TreeDelta object describing changes from the "
1686
"basis revision. hooks MUST NOT modify this delta. "
1687
" future_tree is an in-memory tree obtained from "
1688
"CommitBuilder.revision_tree() and hooks MUST NOT modify this "
1692
"Called in the bzr client after a commit has completed. "
1693
"post_commit is called with (local, master, old_revno, old_revid, "
1694
"new_revno, new_revid). old_revid is NULL_REVISION for the first "
1695
"commit to a branch.", (0, 15))
1698
"Called in the bzr client after an uncommit completes. "
1699
"post_uncommit is called with (local, master, old_revno, "
1700
"old_revid, new_revno, new_revid) where local is the local branch "
1701
"or None, master is the target branch, and an empty branch "
1702
"receives new_revno of 0, new_revid of None.", (0, 15))
1704
'pre_change_branch_tip',
1705
"Called in bzr client and server before a change to the tip of a "
1706
"branch is made. pre_change_branch_tip is called with a "
1707
"breezy.branch.ChangeBranchTipParams. Note that push, pull, "
1708
"commit, uncommit will all trigger this hook.", (1, 6))
1710
'post_change_branch_tip',
1711
"Called in bzr client and server after a change to the tip of a "
1712
"branch is made. post_change_branch_tip is called with a "
1713
"breezy.branch.ChangeBranchTipParams. Note that push, pull, "
1714
"commit, uncommit will all trigger this hook.", (1, 4))
1716
'transform_fallback_location',
1717
"Called when a stacked branch is activating its fallback "
1718
"locations. transform_fallback_location is called with (branch, "
1719
"url), and should return a new url. Returning the same url "
1720
"allows it to be used as-is, returning a different one can be "
1721
"used to cause the branch to stack on a closer copy of that "
1722
"fallback_location. Note that the branch cannot have history "
1723
"accessing methods called on it during this hook because the "
1724
"fallback locations have not been activated. When there are "
1725
"multiple hooks installed for transform_fallback_location, "
1726
"all are called with the url returned from the previous hook."
1727
"The order is however undefined.", (1, 9))
1729
'automatic_tag_name',
1730
"Called to determine an automatic tag name for a revision. "
1731
"automatic_tag_name is called with (branch, revision_id) and "
1732
"should return a tag name or None if no tag name could be "
1733
"determined. The first non-None tag name returned will be used.",
1737
"Called after new branch initialization completes. "
1738
"post_branch_init is called with a "
1739
"breezy.branch.BranchInitHookParams. "
1740
"Note that init, branch and checkout (both heavyweight and "
1741
"lightweight) will all trigger this hook.", (2, 2))
1744
"Called after a checkout switches branch. "
1745
"post_switch is called with a "
1746
"breezy.branch.SwitchHookParams.", (2, 2))
1749
# install the default hooks into the Branch class.
1750
Branch.hooks = BranchHooks()
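# Illustrative sketch of installing one of the hooks declared above; the
# callback is hypothetical, install_named_hook() is the standard Hooks API.
#
#   def _log_push(push_result):
#       note('pushed %s -> %s', push_result.old_revid, push_result.new_revid)
#
#   Branch.hooks.install_named_hook('post_push', _log_push, 'push logger')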
1753
class ChangeBranchTipParams(object):
1754
"""Object holding parameters passed to `*_change_branch_tip` hooks.
1756
There are 5 fields that hooks may wish to access:
1758
:ivar branch: the branch being changed
1759
:ivar old_revno: revision number before the change
1760
:ivar new_revno: revision number after the change
1761
:ivar old_revid: revision id before the change
1762
:ivar new_revid: revision id after the change
1764
The revid fields are strings. The revno fields are integers.
1767
def __init__(self, branch, old_revno, new_revno, old_revid, new_revid):
1768
"""Create a group of ChangeBranchTip parameters.
1770
:param branch: The branch being changed.
1771
:param old_revno: Revision number before the change.
1772
:param new_revno: Revision number after the change.
1773
:param old_revid: Tip revision id before the change.
1774
:param new_revid: Tip revision id after the change.
1776
self.branch = branch
1777
self.old_revno = old_revno
1778
self.new_revno = new_revno
1779
self.old_revid = old_revid
1780
self.new_revid = new_revid
1782
def __eq__(self, other):
1783
return self.__dict__ == other.__dict__
1786
return "<%s of %s from (%s, %s) to (%s, %s)>" % (
1787
self.__class__.__name__, self.branch,
1788
self.old_revno, self.old_revid, self.new_revno, self.new_revid)
1791
class BranchInitHookParams(object):
1792
"""Object holding parameters passed to `*_branch_init` hooks.
1794
There are 4 fields that hooks may wish to access:
1796
:ivar format: the branch format
1797
:ivar bzrdir: the ControlDir where the branch will be/has been initialized
1798
:ivar name: name of colocated branch, if any (or None)
1799
:ivar branch: the branch created
1801
Note that for lightweight checkouts, the bzrdir and format fields refer to
1802
the checkout, hence they are different from the corresponding fields in
1803
branch, which refer to the original branch.
1806
def __init__(self, format, controldir, name, branch):
1807
"""Create a group of BranchInitHook parameters.
1809
:param format: the branch format
1810
:param controldir: the ControlDir where the branch will be/has been
1812
:param name: name of colocated branch, if any (or None)
1813
:param branch: the branch created
1815
Note that for lightweight checkouts, the bzrdir and format fields refer
1816
to the checkout, hence they are different from the corresponding fields
1817
in branch, which refer to the original branch.
1819
self.format = format
1820
self.controldir = controldir
1822
self.branch = branch
1824
def __eq__(self, other):
1825
return self.__dict__ == other.__dict__
1828
return "<%s of %s>" % (self.__class__.__name__, self.branch)
1831
class SwitchHookParams(object):
1832
"""Object holding parameters passed to `*_switch` hooks.
1834
There are 4 fields that hooks may wish to access:
1836
:ivar control_dir: ControlDir of the checkout to change
1837
:ivar to_branch: branch that the checkout is to reference
1838
:ivar force: skip the check for local commits in a heavy checkout
1839
:ivar revision_id: revision ID to switch to (or None)
1842
def __init__(self, control_dir, to_branch, force, revision_id):
1843
"""Create a group of SwitchHook parameters.
1845
:param control_dir: ControlDir of the checkout to change
1846
:param to_branch: branch that the checkout is to reference
1847
:param force: skip the check for local commits in a heavy checkout
1848
:param revision_id: revision ID to switch to (or None)
1850
self.control_dir = control_dir
1851
self.to_branch = to_branch
1853
self.revision_id = revision_id
1855
def __eq__(self, other):
1856
return self.__dict__ == other.__dict__
1859
return "<%s for %s to (%s, %s)>" % (
1860
self.__class__.__name__, self.control_dir, self.to_branch,
1864
class BranchFormatRegistry(controldir.ControlComponentFormatRegistry):
1865
"""Branch format registry."""
1867
def __init__(self, other_registry=None):
1868
super(BranchFormatRegistry, self).__init__(other_registry)
1869
self._default_format = None
1870
self._default_format_key = None
1872
def get_default(self):
1873
"""Return the current default format."""
1874
if (self._default_format_key is not None
1875
and self._default_format is None):
1876
self._default_format = self.get(self._default_format_key)
1877
return self._default_format
1879
def set_default(self, format):
1880
"""Set the default format."""
1881
self._default_format = format
1882
self._default_format_key = None
1884
def set_default_key(self, format_string):
1885
"""Set the default format by its format string."""
1886
self._default_format_key = format_string
1887
self._default_format = None
1890
network_format_registry = registry.FormatRegistry()
1891
"""Registry of formats indexed by their network name.
1893
The network name for a branch format is an identifier that can be used when
1894
referring to formats with smart server operations. See
1895
BranchFormat.network_name() for more detail.
1898
format_registry = BranchFormatRegistry(network_format_registry)
1901
# formats which have no format string are not discoverable
1902
# and not independently creatable, so are not registered.
1903
format_registry.register_lazy(
1904
b"Bazaar-NG branch format 5\n", "breezy.bzr.fullhistory",
1906
format_registry.register_lazy(
1907
b"Bazaar Branch Format 6 (bzr 0.15)\n",
1908
"breezy.bzr.branch", "BzrBranchFormat6")
1909
format_registry.register_lazy(
1910
b"Bazaar Branch Format 7 (needs bzr 1.6)\n",
1911
"breezy.bzr.branch", "BzrBranchFormat7")
1912
format_registry.register_lazy(
1913
b"Bazaar Branch Format 8 (needs bzr 1.15)\n",
1914
"breezy.bzr.branch", "BzrBranchFormat8")
1915
format_registry.register_lazy(
1916
b"Bazaar-NG Branch Reference Format 1\n",
1917
"breezy.bzr.branch", "BranchReferenceFormat")
1919
format_registry.set_default_key(b"Bazaar Branch Format 7 (needs bzr 1.6)\n")
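# Sketch of how a plugin might register an extra branch format lazily (the
# format string and module path below are hypothetical):
#
#   format_registry.register_lazy(
#       b"Example Branch Format 1\n",
#       "example_plugin.branch", "ExampleBranchFormat")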
1922
class BranchWriteLockResult(LogicalLockResult):
1923
"""The result of write locking a branch.
1925
:ivar token: The token obtained from the underlying branch lock, or
1927
:ivar unlock: A callable which will unlock the lock.
1931
return "BranchWriteLockResult(%r, %r)" % (self.unlock, self.token)
1934
######################################################################
1935
# results of operations
1938
class _Result(object):
1940
def _show_tag_conficts(self, to_file):
1941
if not getattr(self, 'tag_conflicts', None):
1943
to_file.write('Conflicting tags:\n')
1944
for name, value1, value2 in self.tag_conflicts:
1945
to_file.write(' %s\n' % (name, ))
1948
class PullResult(_Result):
1949
"""Result of a Branch.pull operation.
1951
:ivar old_revno: Revision number before pull.
1952
:ivar new_revno: Revision number after pull.
1953
:ivar old_revid: Tip revision id before pull.
1954
:ivar new_revid: Tip revision id after pull.
1955
:ivar source_branch: Source (local) branch object. (read locked)
1956
:ivar master_branch: Master branch of the target, or the target if no
1958
:ivar local_branch: target branch if there is a Master, else None
1959
:ivar target_branch: Target/destination branch object. (write locked)
1960
:ivar tag_conflicts: A list of tag conflicts, see BasicTags.merge_to
1961
:ivar tag_updates: A dict with new tags, see BasicTags.merge_to
1964
def report(self, to_file):
1965
tag_conflicts = getattr(self, "tag_conflicts", None)
1966
tag_updates = getattr(self, "tag_updates", None)
1968
if self.old_revid != self.new_revid:
1969
to_file.write('Now on revision %d.\n' % self.new_revno)
1971
to_file.write('%d tag(s) updated.\n' % len(tag_updates))
1972
if self.old_revid == self.new_revid and not tag_updates:
1973
if not tag_conflicts:
1974
to_file.write('No revisions or tags to pull.\n')
1976
to_file.write('No revisions to pull.\n')
1977
self._show_tag_conficts(to_file)
1980
class BranchPushResult(_Result):
1981
"""Result of a Branch.push operation.
1983
:ivar old_revno: Revision number (eg 10) of the target before push.
1984
:ivar new_revno: Revision number (eg 12) of the target after push.
1985
:ivar old_revid: Tip revision id (eg joe@foo.com-1234234-aoeua34) of target
1987
:ivar new_revid: Tip revision id (eg joe@foo.com-5676566-boa234a) of target
1989
:ivar source_branch: Source branch object that the push was from. This is
1990
read locked, and generally is a local (and thus low latency) branch.
1991
:ivar master_branch: If target is a bound branch, the master branch of
1992
target, or target itself. Always write locked.
1993
:ivar target_branch: The direct Branch where data is being sent (write
1995
:ivar local_branch: If the target is a bound branch this will be the
1996
target, otherwise it will be None.
1999
def report(self, to_file):
2000
# TODO: This function gets passed a to_file, but then
2001
# ignores it and calls note() instead. This is also
2002
# inconsistent with PullResult(), which writes to stdout.
2003
# -- JRV20110901, bug #838853
2004
tag_conflicts = getattr(self, "tag_conflicts", None)
2005
tag_updates = getattr(self, "tag_updates", None)
2007
if self.old_revid != self.new_revid:
2008
if self.new_revno is not None:
2009
note(gettext('Pushed up to revision %d.'),
2012
note(gettext('Pushed up to revision id %s.'),
2013
self.new_revid.decode('utf-8'))
2015
note(ngettext('%d tag updated.', '%d tags updated.',
2016
len(tag_updates)) % len(tag_updates))
2017
if self.old_revid == self.new_revid and not tag_updates:
2018
if not tag_conflicts:
2019
note(gettext('No new revisions or tags to push.'))
2021
note(gettext('No new revisions to push.'))
2022
self._show_tag_conficts(to_file)
2025
class BranchCheckResult(object):
2026
"""Results of checking branch consistency.
2031
def __init__(self, branch):
2032
self.branch = branch
2035
def report_results(self, verbose):
2036
"""Report the check results via trace.note.
2038
:param verbose: Requests more detailed display of what was checked,
2041
note(gettext('checked branch {0} format {1}').format(
2042
self.branch.user_url, self.branch._format))
2043
for error in self.errors:
2044
note(gettext('found error:%s'), error)
2047
class InterBranch(InterObject):
2048
"""This class represents operations taking place between two branches.
2050
Its instances have methods like pull() and push() and contain
2051
references to the source and target repositories these operations
2052
can be carried out on.
2056
"""The available optimised InterBranch types."""
2059
def _get_branch_formats_to_test(klass):
2060
"""Return an iterable of format tuples for testing.
2062
:return: An iterable of (from_format, to_format) to use when testing
2063
this InterBranch class. Each InterBranch class should define this
2066
raise NotImplementedError(klass._get_branch_formats_to_test)
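# Usage sketch for the optimiser lookup (assumes `source` and `target` are
# already-opened Branch objects):
#
#   inter = InterBranch.get(source, target)
#   result = inter.pull(overwrite=False)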
2068
def pull(self, overwrite=False, stop_revision=None,
2069
possible_transports=None, local=False, tag_selector=None):
2070
"""Mirror source into target branch.
2072
The target branch is considered to be 'local', having low latency.
2074
:returns: PullResult instance
2076
raise NotImplementedError(self.pull)
2078
def push(self, overwrite=False, stop_revision=None, lossy=False,
2079
_override_hook_source_branch=None, tag_selector=None):
2080
"""Mirror the source branch into the target branch.
2082
The source branch is considered to be 'local', having low latency.
2084
raise NotImplementedError(self.push)
2086
def copy_content_into(self, revision_id=None, tag_selector=None):
2087
"""Copy the content of source into target
2090
if not None, the revision history in the new branch will
2091
be truncated to end with revision_id.
2092
:param tag_selector: Optional callback that can decide
2093
to copy or not copy tags.
2095
raise NotImplementedError(self.copy_content_into)
2097
def fetch(self, stop_revision=None, limit=None, lossy=False):
2100
:param stop_revision: Last revision to fetch
2101
:param limit: Optional rough limit of revisions to fetch
2102
:return: FetchResult object
2104
raise NotImplementedError(self.fetch)
2106
def update_references(self):
2107
"""Import reference information from source to target.
2109
raise NotImplementedError(self.update_references)
2112
def _fix_overwrite_type(overwrite):
2113
if isinstance(overwrite, bool):
2115
return ["history", "tags"]
2121
class GenericInterBranch(InterBranch):
2122
"""InterBranch implementation that uses public Branch functions."""
2125
def is_compatible(klass, source, target):
2126
# GenericBranch uses the public API, so always compatible
2130
def _get_branch_formats_to_test(klass):
2131
return [(format_registry.get_default(), format_registry.get_default())]
2134
def unwrap_format(klass, format):
2135
if isinstance(format, remote.RemoteBranchFormat):
2136
format._ensure_real()
2137
return format._custom_format
2140
def copy_content_into(self, revision_id=None, tag_selector=None):
2141
"""Copy the content of source into target
2143
revision_id: if not None, the revision history in the new branch will
2144
be truncated to end with revision_id.
2146
with self.source.lock_read(), self.target.lock_write():
2147
self.source._synchronize_history(self.target, revision_id)
2148
self.update_references()
2150
parent = self.source.get_parent()
2151
except errors.InaccessibleParent as e:
2152
mutter('parent was not accessible to copy: %s', str(e))
2155
self.target.set_parent(parent)
2156
if self.source._push_should_merge_tags():
2157
self.source.tags.merge_to(self.target.tags, selector=tag_selector)
2159
def fetch(self, stop_revision=None, limit=None, lossy=False):
2160
if self.target.base == self.source.base:
2162
with self.source.lock_read(), self.target.lock_write():
2163
fetch_spec_factory = fetch.FetchSpecFactory()
2164
fetch_spec_factory.source_branch = self.source
2165
fetch_spec_factory.source_branch_stop_revision_id = stop_revision
2166
fetch_spec_factory.source_repo = self.source.repository
2167
fetch_spec_factory.target_repo = self.target.repository
2168
fetch_spec_factory.target_repo_kind = (
2169
fetch.TargetRepoKinds.PREEXISTING)
2170
fetch_spec_factory.limit = limit
2171
fetch_spec = fetch_spec_factory.make_fetch_spec()
2172
return self.target.repository.fetch(
2173
self.source.repository,
2175
fetch_spec=fetch_spec)
2177
def _update_revisions(self, stop_revision=None, overwrite=False,
2179
with self.source.lock_read(), self.target.lock_write():
2180
other_revno, other_last_revision = self.source.last_revision_info()
2181
stop_revno = None # unknown
2182
if stop_revision is None:
2183
stop_revision = other_last_revision
2184
if _mod_revision.is_null(stop_revision):
2185
# if there are no commits, we're done.
2187
stop_revno = other_revno
2189
# what's the current last revision, before we fetch [and change it
2191
last_rev = _mod_revision.ensure_null(self.target.last_revision())
2192
# we fetch here so that we don't process data twice in the common
2193
# case of having something to pull, and so that the check for
2194
# already merged can operate on the just fetched graph, which will
2195
# be cached in memory.
2196
self.fetch(stop_revision=stop_revision)
2197
# Check to see if one is an ancestor of the other
2200
graph = self.target.repository.get_graph()
2201
if self.target._check_if_descendant_or_diverged(
2202
stop_revision, last_rev, graph, self.source):
2203
# stop_revision is a descendant of last_rev, but we aren't
2204
# overwriting, so we're done.
2206
if stop_revno is None:
2208
graph = self.target.repository.get_graph()
2209
this_revno, this_last_revision = \
2210
self.target.last_revision_info()
2211
stop_revno = graph.find_distance_to_null(
2212
stop_revision, [(other_last_revision, other_revno),
2213
(this_last_revision, this_revno)])
2214
self.target.set_last_revision_info(stop_revno, stop_revision)
2216
def pull(self, overwrite=False, stop_revision=None,
2217
possible_transports=None, run_hooks=True,
2218
_override_hook_target=None, local=False,
2220
"""Pull from source into self, updating my master if any.
2222
:param run_hooks: Private parameter - if false, this branch
2223
is being called because it's the master of the primary branch,
2224
so it should not run its hooks.
2226
with cleanup.ExitStack() as exit_stack:
2227
exit_stack.enter_context(self.target.lock_write())
2228
bound_location = self.target.get_bound_location()
2229
if local and not bound_location:
2230
raise errors.LocalRequiresBoundBranch()
2231
master_branch = None
2232
source_is_master = False
2234
# bound_location comes from a config file, some care has to be
2235
# taken to relate it to source.user_url
2236
normalized = urlutils.normalize_url(bound_location)
2238
relpath = self.source.user_transport.relpath(normalized)
2239
source_is_master = (relpath == '')
2240
except (errors.PathNotChild, urlutils.InvalidURL):
2241
source_is_master = False
2242
if not local and bound_location and not source_is_master:
2243
# not pulling from master, so we need to update master.
2244
master_branch = self.target.get_master_branch(
2245
possible_transports)
2246
exit_stack.enter_context(master_branch.lock_write())
2248
# pull from source into master.
2250
self.source, overwrite, stop_revision, run_hooks=False,
2251
tag_selector=tag_selector)
2253
overwrite, stop_revision, _hook_master=master_branch,
2254
run_hooks=run_hooks,
2255
_override_hook_target=_override_hook_target,
2256
merge_tags_to_master=not source_is_master,
2257
tag_selector=tag_selector)
2259
def push(self, overwrite=False, stop_revision=None, lossy=False,
2260
_override_hook_source_branch=None, tag_selector=None):
2261
"""See InterBranch.push.
2263
This is the basic concrete implementation of push()
2265
:param _override_hook_source_branch: If specified, run the hooks
2266
passing this Branch as the source, rather than self. This is for
2267
use of RemoteBranch, where push is delegated to the underlying
2271
raise errors.LossyPushToSameVCS(self.source, self.target)
2272
# TODO: Public option to disable running hooks - should be trivial but
2276
if _override_hook_source_branch:
2277
result.source_branch = _override_hook_source_branch
2278
for hook in Branch.hooks['post_push']:
2281
with self.source.lock_read(), self.target.lock_write():
2282
bound_location = self.target.get_bound_location()
2283
if bound_location and self.target.base != bound_location:
2284
# there is a master branch.
2286
# XXX: Why the second check? Is it even supported for a branch
2287
# to be bound to itself? -- mbp 20070507
2288
master_branch = self.target.get_master_branch()
2289
with master_branch.lock_write():
2290
# push into the master from the source branch.
2291
master_inter = InterBranch.get(self.source, master_branch)
2292
master_inter._basic_push(
2293
overwrite, stop_revision, tag_selector=tag_selector)
2294
# and push into the target branch from the source. Note
2295
# that we push from the source branch again, because it's
2296
# considered the highest bandwidth repository.
2297
result = self._basic_push(
2298
overwrite, stop_revision, tag_selector=tag_selector)
2299
result.master_branch = master_branch
2300
result.local_branch = self.target
2303
master_branch = None
2305
result = self._basic_push(
2306
overwrite, stop_revision, tag_selector=tag_selector)
2307
# TODO: Why set master_branch and local_branch if there's no
2308
# binding? Maybe cleaner to just leave them unset? -- mbp
2310
result.master_branch = self.target
2311
result.local_branch = None
2315
def _basic_push(self, overwrite, stop_revision, tag_selector=None):
2316
"""Basic implementation of push without bound branches or hooks.
2318
Must be called with source read locked and target write locked.
2320
result = BranchPushResult()
2321
result.source_branch = self.source
2322
result.target_branch = self.target
2323
result.old_revno, result.old_revid = self.target.last_revision_info()
2324
overwrite = _fix_overwrite_type(overwrite)
2325
if result.old_revid != stop_revision:
2326
# We assume that during 'push' this repository is closer than
2328
graph = self.source.repository.get_graph(self.target.repository)
2329
self._update_revisions(
2330
stop_revision, overwrite=("history" in overwrite), graph=graph)
2331
if self.source._push_should_merge_tags():
2332
result.tag_updates, result.tag_conflicts = (
2333
self.source.tags.merge_to(
2334
self.target.tags, "tags" in overwrite, selector=tag_selector))
2335
self.update_references()
2336
result.new_revno, result.new_revid = self.target.last_revision_info()
2339
def _pull(self, overwrite=False, stop_revision=None,
2340
possible_transports=None, _hook_master=None, run_hooks=True,
2341
_override_hook_target=None, local=False,
2342
merge_tags_to_master=True, tag_selector=None):
2345
This function is the core worker, used by GenericInterBranch.pull to
2346
avoid duplication when pulling source->master and source->local.
2348
:param _hook_master: Private parameter - set the branch to
2349
be supplied as the master to pull hooks.
2350
:param run_hooks: Private parameter - if false, this branch
2351
is being called because it's the master of the primary branch,
2352
so it should not run its hooks.
2353
is being called because it's the master of the primary branch,
2354
so it should not run its hooks.
2355
:param _override_hook_target: Private parameter - set the branch to be
2356
supplied as the target_branch to pull hooks.
2357
:param local: Only update the local branch, and not the bound branch.
2359
# This type of branch can't be bound.
2361
raise errors.LocalRequiresBoundBranch()
2362
result = PullResult()
2363
result.source_branch = self.source
2364
if _override_hook_target is None:
2365
result.target_branch = self.target
2367
result.target_branch = _override_hook_target
2368
with self.source.lock_read():
2369
# We assume that during 'pull' the target repository is closer than
2371
graph = self.target.repository.get_graph(self.source.repository)
2372
# TODO: Branch formats should have a flag that indicates
2373
# that revno's are expensive, and pull() should honor that flag.
2375
result.old_revno, result.old_revid = \
2376
self.target.last_revision_info()
2377
overwrite = _fix_overwrite_type(overwrite)
2378
self._update_revisions(
2379
stop_revision, overwrite=("history" in overwrite), graph=graph)
2380
# TODO: The old revid should be specified when merging tags,
2381
# so a tags implementation that versions tags can only
2382
# pull in the most recent changes. -- JRV20090506
2383
result.tag_updates, result.tag_conflicts = (
2384
self.source.tags.merge_to(
2385
self.target.tags, "tags" in overwrite,
2386
ignore_master=not merge_tags_to_master,
2387
selector=tag_selector))
2388
self.update_references()
2389
result.new_revno, result.new_revid = (
2390
self.target.last_revision_info())
2392
result.master_branch = _hook_master
2393
result.local_branch = result.target_branch
2395
result.master_branch = result.target_branch
2396
result.local_branch = None
2398
for hook in Branch.hooks['post_pull']:
2402
def update_references(self):
2403
if not getattr(self.source._format, 'supports_reference_locations', False):
2405
reference_dict = self.source._get_all_reference_info()
2406
if len(reference_dict) == 0:
2408
old_base = self.source.base
2409
new_base = self.target.base
2410
target_reference_dict = self.target._get_all_reference_info()
2411
for tree_path, (branch_location, file_id) in viewitems(reference_dict):
2413
branch_location = urlutils.rebase_url(branch_location,
2415
except urlutils.InvalidRebaseURLs:
2416
# Fall back to absolute URL
2417
branch_location = urlutils.join(old_base, branch_location)
2418
target_reference_dict.setdefault(
2419
tree_path, (branch_location, file_id))
2420
self.target._set_all_reference_info(target_reference_dict)
2423
InterBranch.register_optimiser(GenericInterBranch)
1521
if filename == head:
1528
def gen_file_id(name):
1529
"""Return new file id.
1531
This should probably generate proper UUIDs, but for the moment we
1532
cope with just randomness because running uuidgen every time is
1535
from binascii import hexlify
1536
from time import time
1538
# get last component
1539
idx = name.rfind('/')
1541
name = name[idx+1 : ]
1542
idx = name.rfind('\\')
1544
name = name[idx+1 : ]
1546
# make it not a hidden file
1547
name = name.lstrip('.')
1549
# remove any weird characters; we don't escape them but rather
1550
# just pull them out
1551
name = re.sub(r'[^\w.]', '', name)
1553
s = hexlify(rand_bytes(8))
1554
return '-'.join((name, compact_date(time()), s))
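# Illustrative example of the ids produced above; the date and hex suffix
# vary per call, so the result is roughly of the form:
#
#   gen_file_id('src/hello.py')
#   # -> 'hello.py-20051004031247-a1b2c3d4e5f60718'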
1558
"""Return a new tree-root file id."""
1559
return gen_file_id('TREE_ROOT')