# Copyright (C) 2005-2012 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from __future__ import absolute_import

from .lazy_import import lazy_import
lazy_import(globals(), """
    config as _mod_config,
    revision as _mod_revision,
from breezy.bzr import (
from breezy.i18n import gettext, ngettext
""")

from .hooks import Hooks
from .inter import InterObject
from .lock import LogicalLockResult
from .trace import mutter, mutter_callsite, note, is_quiet, warning

class UnstackableBranchFormat(errors.BzrError):

    _fmt = ("The branch '%(url)s'(%(format)s) is not a stackable format. "
            "You will need to upgrade the branch to permit branch stacking.")

    def __init__(self, format, url):
        errors.BzrError.__init__(self)
        self.format = format
        self.url = url

from warnings import warn
from cStringIO import StringIO

from bzrlib.inventory import InventoryEntry
import bzrlib.inventory as inventory
from bzrlib.trace import mutter, note
from bzrlib.osutils import (isdir, quotefn, compact_date, rand_bytes,
                            rename, splitpath, sha_file, appendpath,
                            file_kind, abspath)
import bzrlib.errors as errors
from bzrlib.errors import (BzrError, InvalidRevisionNumber, InvalidRevisionId,
                           NoSuchRevision, HistoryMissing, NotBranchError,
                           DivergedBranches, LockError, UnlistableStore,
                           UnlistableBranch, NoSuchFile, NotVersionedError)
from bzrlib.textui import show_status
from bzrlib.revision import Revision, is_ancestor, get_intervening_revisions
from bzrlib.delta import compare_trees
from bzrlib.tree import EmptyTree, RevisionTree
from bzrlib.inventory import Inventory
from bzrlib.store import copy_all
from bzrlib.store.compressed_text import CompressedTextStore
from bzrlib.store.text import TextStore
from bzrlib.store.weave import WeaveStore
from bzrlib.testament import Testament
import bzrlib.transactions as transactions
from bzrlib.transport import Transport, get_transport

BZR_BRANCH_FORMAT_4 = "Bazaar-NG branch, format 0.0.4\n"
BZR_BRANCH_FORMAT_5 = "Bazaar-NG branch, format 5\n"
BZR_BRANCH_FORMAT_6 = "Bazaar-NG branch, format 6\n"
## TODO: Maybe include checks for common corruption of newlines, etc?


# TODO: Some operations like log might retrieve the same revisions
# repeatedly to calculate deltas.  We could perhaps have a weakref
# cache in memory to make this faster.  In general anything can be
# cached in memory between lock and unlock operations.

def find_branch(*ignored, **ignored_too):
    # XXX: leave this here for about one release, then remove it
    raise NotImplementedError('find_branch() is not supported anymore, '
                              'please use one of the new branch constructors')

def needs_read_lock(unbound):
    """Decorate unbound to take out and release a read lock."""
    def decorated(self, *args, **kwargs):
        self.lock_read()
        try:
            return unbound(self, *args, **kwargs)
        finally:
            self.unlock()
    return decorated


def needs_write_lock(unbound):
    """Decorate unbound to take out and release a write lock."""
    def decorated(self, *args, **kwargs):
        self.lock_write()
        try:
            return unbound(self, *args, **kwargs)
        finally:
            self.unlock()
    return decorated
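

# Illustrative sketch, not part of the original module: how the decorators
# above are meant to be applied.  ``_ExampleLockable`` is a hypothetical
# stand-in for a Branch that only counts lock/unlock calls, so the wrapping
# behaviour can be seen in isolation.
class _ExampleLockable(object):

    def __init__(self):
        self.lock_depth = 0

    def lock_read(self):
        self.lock_depth += 1

    def lock_write(self):
        self.lock_depth += 1

    def unlock(self):
        self.lock_depth -= 1

    @needs_read_lock
    def tip(self):
        # Runs with the read lock held; needs_read_lock releases it on exit.
        return 'example-revision-id'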


######################################################################


class Branch(controldir.ControlComponent):
    """Branch holding a history of revisions.

    :ivar base:
        Base directory/url of the branch; using control_url and
        control_transport is more standardized.
    :ivar hooks: An instance of BranchHooks.
    :ivar _master_branch_cache: cached result of get_master_branch, see
        _clear_cached_state.
    """
    # this is really an instance variable - FIXME move it there

    @property
    def control_transport(self):
        return self._transport

    @property
    def user_transport(self):
        return self.controldir.user_transport

    def __init__(self, possible_transports=None):
        self.tags = self._format.make_tags(self)
        self._revision_history_cache = None
        self._revision_id_to_revno_cache = None
        self._partial_revision_id_to_revno_cache = {}
        self._partial_revision_history_cache = []
        self._last_revision_info_cache = None
        self._master_branch_cache = None
        self._merge_sorted_revisions_cache = None
        self._open_hook(possible_transports)
        hooks = Branch.hooks['open']
        for hook in hooks:
            hook(self)

    def _open_hook(self, possible_transports):
        """Called by init to allow simpler extension of the base class."""

    def _activate_fallback_location(self, url, possible_transports):
        """Activate the branch/repository from url as a fallback repository."""
        for existing_fallback_repo in self.repository._fallback_repositories:
            if existing_fallback_repo.user_url == url:
                # This fallback is already configured.  This probably only
                # happens because ControlDir.sprout is a horrible mess.  To
                # avoid confusing _unstack we don't add this a second time.
                mutter('duplicate activation of fallback %r on %r', url, self)
                return
        repo = self._get_fallback_repository(url, possible_transports)
        if repo.has_same_location(self.repository):
            raise errors.UnstackableLocationError(self.user_url, url)
        self.repository.add_fallback_repository(repo)

    def break_lock(self):
        """Break a lock if one is present from another instance.

        Uses the ui factory to ask for confirmation if the lock may be from
        an active process.

        This will probe the repository for its lock as well.
        """
        self.control_files.break_lock()
        self.repository.break_lock()
        master = self.get_master_branch()
        if master is not None:
            master.break_lock()

    def _check_stackable_repo(self):
        if not self.repository._format.supports_external_lookups:
            raise errors.UnstackableRepositoryFormat(
                self.repository._format, self.repository.base)

    def _extend_partial_history(self, stop_index=None, stop_revision=None):
        """Extend the partial history to include a given index

        If a stop_index is supplied, stop when that index has been reached.
        If a stop_revision is supplied, stop when that revision is
        encountered.  Otherwise, stop when the beginning of history is
        reached.

        :param stop_index: The index which should be present.  When it is
            present, history extension will stop.
        :param stop_revision: The revision id which should be present.  When
            it is encountered, history extension will stop.
        """
        if len(self._partial_revision_history_cache) == 0:
            self._partial_revision_history_cache = [self.last_revision()]
        repository._iter_for_revno(
            self.repository, self._partial_revision_history_cache,
            stop_index=stop_index, stop_revision=stop_revision)
        if self._partial_revision_history_cache[-1] == \
                _mod_revision.NULL_REVISION:
            self._partial_revision_history_cache.pop()

    def _get_check_refs(self):
        """Get the references needed for check()."""
        revid = self.last_revision()
        return [('revision-existence', revid), ('lefthand-distance', revid)]

    @staticmethod
    def open(base, _unsupported=False, possible_transports=None):
        """Open the branch rooted at base.

        For instance, if the branch is at URL/.bzr/branch,
        Branch.open(URL) -> a Branch instance.
        """
        control = controldir.ControlDir.open(
            base, possible_transports=possible_transports,
            _unsupported=_unsupported)
        return control.open_branch(
            unsupported=_unsupported,
            possible_transports=possible_transports)

    @staticmethod
    def open_from_transport(transport, name=None, _unsupported=False,
                            possible_transports=None):
        """Open the branch rooted at transport"""
        control = controldir.ControlDir.open_from_transport(
            transport, _unsupported)
        return control.open_branch(
            name=name, unsupported=_unsupported,
            possible_transports=possible_transports)
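
    @staticmethod
    def _example_open_and_read_tip(location):
        """Illustrative sketch, not part of the original API.

        Shows how a caller is expected to use Branch.open(); ``location`` is
        assumed to be a URL or path that already contains a branch, and the
        lock result is used as a context manager as elsewhere in this module.
        """
        branch = Branch.open(location)
        with branch.lock_read():
            revno, revision_id = branch.last_revision_info()
        return revno, revision_id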

    def __init__(self, *ignored, **ignored_too):
        raise NotImplementedError('The Branch class is abstract')

    @staticmethod
    def open_downlevel(base):
        """Open a branch which may be of an old format.

        Only local branches are supported."""
        return _Branch(get_transport(base), relax_version_check=True)

    @staticmethod
    def open(base):
        """Open an existing branch, rooted at 'base' (url)"""
        t = get_transport(base)
        mutter("trying to open %r with transport %r", base, t)
        return _Branch(t)

    @staticmethod
    def open_containing(url, possible_transports=None):
        """Open an existing branch which contains url.

        This probes for a branch at url, and searches upwards from there.

        Basically we keep looking up until we find the control directory or
        run into the root.  If there isn't one, raises NotBranchError.
        If there is one and it is either an unrecognised format or an
        unsupported format, UnknownFormatError or UnsupportedFormatError are
        raised.  If there is one, it is returned, along with the unused portion
        of url.
        """
        control, relpath = controldir.ControlDir.open_containing(
            url, possible_transports)
        branch = control.open_branch(possible_transports=possible_transports)
        return (branch, relpath)

    def _push_should_merge_tags(self):
        """Should _basic_push merge this branch's tags into the target?

        The default implementation returns False if this branch has no tags,
        and True the rest of the time.  Subclasses may override this.
        """
        return self.supports_tags() and self.tags.get_tag_dict()

    def get_config(self):
        """Get a breezy.config.BranchConfig for this Branch.

        This can then be used to get and set configuration options for the
        branch.

        :return: A breezy.config.BranchConfig.
        """
        return _mod_config.BranchConfig(self)

    def get_config_stack(self):
        """Get a breezy.config.BranchStack for this Branch.

        This can then be used to get and set configuration options for the
        branch.

        :return: A breezy.config.BranchStack.
        """
        return _mod_config.BranchStack(self)
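
    def _example_config_usage(self):
        """Illustrative sketch, not part of the original API.

        Shows the intended use of get_config_stack() for typed options; the
        option name 'append_revisions_only' is one this module itself reads
        and writes elsewhere.
        """
        conf = self.get_config_stack()
        append_only = conf.get('append_revisions_only')
        # Options can be set through the same stack.
        conf.set('append_revisions_only', True)
        return append_only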

    def store_uncommitted(self, creator):
        """Store uncommitted changes from a ShelfCreator.

        :param creator: The ShelfCreator containing uncommitted changes, or
            None to delete any stored changes.
        :raises: ChangesAlreadyStored if the branch already has changes.
        """
        raise NotImplementedError(self.store_uncommitted)

    def get_unshelver(self, tree):
        """Return a shelf.Unshelver for this branch and tree.

        :param tree: The tree to use to construct the Unshelver.
        :return: an Unshelver or None if no changes are stored.
        """
        raise NotImplementedError(self.get_unshelver)

    def _get_fallback_repository(self, url, possible_transports):
        """Get the repository we fallback to at url."""
        url = urlutils.join(self.base, url)
        a_branch = Branch.open(url, possible_transports=possible_transports)
        return a_branch.repository

    def _get_nick(self, local=False, possible_transports=None):
        config = self.get_config()
        # explicit overrides master, but don't look for master if local is True
        if not local and not config.has_explicit_nickname():
            try:
                master = self.get_master_branch(possible_transports)
                if master and self.user_url == master.user_url:
                    raise errors.RecursiveBind(self.user_url)
                if master is not None:
                    # return the master branch value
                    return master.nick
            except errors.RecursiveBind as e:
                raise e
            except errors.BzrError as e:
                # Silently fall back to local implicit nick if the master is
                # unavailable
                mutter("Could not connect to bound branch, "
                       "falling back to local nick.\n " + str(e))
        return config.get_nickname()

    def _set_nick(self, nick):
        self.get_config().set_user_option('nickname', nick, warn_masked=True)

    nick = property(_get_nick, _set_nick)

    def is_locked(self):
        raise NotImplementedError(self.is_locked)
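
    def _example_nick_usage(self):
        """Illustrative sketch, not part of the original API.

        The ``nick`` property above reads the nickname from the branch (or
        from its master when bound) and writes it as the 'nickname' config
        option; the value 'feature-branch' is an arbitrary example.
        """
        old_nick = self.nick
        self.nick = 'feature-branch'
        return old_nick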

    def _lefthand_history(self, revision_id, last_rev=None,
                          other_branch=None):
        if 'evil' in debug.debug_flags:
            mutter_callsite(4, "_lefthand_history scales with history.")
        # stop_revision must be a descendant of last_revision
        graph = self.repository.get_graph()
        if last_rev is not None:
            if not graph.is_ancestor(last_rev, revision_id):
                # our previous tip is not merged into stop_revision
                raise errors.DivergedBranches(self, other_branch)
        # make a new revision history from the graph
        parents_map = graph.get_parent_map([revision_id])
        if revision_id not in parents_map:
            raise errors.NoSuchRevision(self, revision_id)
        current_rev_id = revision_id
        new_history = []
        check_not_reserved_id = _mod_revision.check_not_reserved_id
        # Do not include ghosts or graph origin in revision_history
        while (current_rev_id in parents_map
               and len(parents_map[current_rev_id]) > 0):
            check_not_reserved_id(current_rev_id)
            new_history.append(current_rev_id)
            current_rev_id = parents_map[current_rev_id][0]
            parents_map = graph.get_parent_map([current_rev_id])
        new_history.reverse()
        return new_history

    def lock_write(self, token=None):
        """Lock the branch for write operations.

        :param token: A token to permit reacquiring a previously held and
            preserved lock.
        :return: A BranchWriteLockResult.
        """
        raise NotImplementedError(self.lock_write)
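
    def _example_locking(self):
        """Illustrative sketch, not part of the original API.

        The lock_write()/lock_read() results are usable as context managers
        (see the ``with self.lock_write():`` calls later in this class),
        which is the preferred way to pair them with unlock().
        """
        with self.lock_write():
            revno, revid = self.last_revision_info()
            # Mutations such as set_last_revision_info() happen under the
            # write lock; here we only re-assert the current tip.
            self.set_last_revision_info(revno, revid)
        with self.lock_read():
            return self.last_revision_info()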

    @staticmethod
    def open_containing(url):
        """Open an existing branch which contains url.

        If there is one, it is returned, along with the unused portion of url.
        """
        t = get_transport(url)
        while True:
            try:
                return _Branch(t), t.relpath(url)
            except NotBranchError:
                pass
            new_t = t.clone('..')
            if new_t.base == t.base:
                # reached the root, whatever that may be
                raise NotBranchError(path=url)
            t = new_t

    @staticmethod
    def initialize(base):
        """Create a new branch, rooted at 'base' (url)"""
        t = get_transport(base)
        return _Branch(t, init=True)

    def setup_caching(self, cache_root):
        """Subclasses that care about caching should override this, and set
        up cached stores located under cache_root.
        """
        self.cache_root = cache_root


class _Branch(Branch):
    """A branch stored in the actual filesystem.

    Note that it's "local" in the context of the filesystem; it doesn't
    really matter if it's on an nfs/smb/afs/coda/... share, as long as
    it's writable, and can be accessed via the normal filesystem API.

    _lock_count
        If _lock_mode is true, a positive count of the number of times the
        lock has been taken.

    _lock
        Lock object from bzrlib.lock.
    """
    # We actually expect this class to be somewhat short-lived; part of its
    # purpose is to try to isolate what bits of the branch logic are tied to
    # filesystem access, so that in a later step, we can extricate them to
    # a separate ("storage") class.

    _inventory_weave = None

    # Map some sort of prefix into a namespace
    # stuff like "revno:10", "revid:", etc.
    # This should match a prefix with a function which accepts
    REVISION_NAMESPACES = {}

    def push_stores(self, branch_to):
        """Copy the content of this branch's stores to branch_to."""
        if (self._branch_format != branch_to._branch_format
                or self._branch_format != 4):
            from bzrlib.fetch import greedy_fetch
            mutter("falling back to fetch logic to push between %s(%s) and %s(%s)",
                   self, self._branch_format, branch_to, branch_to._branch_format)
            greedy_fetch(to_branch=branch_to, from_branch=self,
                         revision=self.last_revision())
            return

        store_pairs = ((self.text_store, branch_to.text_store),
                       (self.inventory_store, branch_to.inventory_store),
                       (self.revision_store, branch_to.revision_store))
        try:
            for from_store, to_store in store_pairs:
                copy_all(from_store, to_store)
        except UnlistableStore:
            raise UnlistableBranch(from_store)
206
def __init__(self, transport, init=False,
207
relax_version_check=False):
208
"""Create new branch object at a particular location.
210
transport -- A Transport object, defining how to access files.
211
(If a string, transport.transport() will be used to
212
create a Transport object)
214
init -- If True, create new control files in a previously
215
unversioned directory. If False, the branch must already
218
relax_version_check -- If true, the usual check for the branch
219
version is not applied. This is intended only for
220
upgrade/recovery type use; it's not guaranteed that
221
all operations will work on old format branches.
223
In the test suite, creation of new trees is tested using the
224
`ScratchBranch` class.
226
assert isinstance(transport, Transport), \
227
"%r is not a Transport" % transport
228
self._transport = transport
231
self._check_format(relax_version_check)
233
def get_store(name, compressed=True, prefixed=False):
234
# FIXME: This approach of assuming stores are all entirely compressed
235
# or entirely uncompressed is tidy, but breaks upgrade from
236
# some existing branches where there's a mixture; we probably
237
# still want the option to look for both.
238
relpath = self._rel_controlfilename(name)
240
store = CompressedTextStore(self._transport.clone(relpath),
243
store = TextStore(self._transport.clone(relpath),
245
#if self._transport.should_cache():
246
# cache_path = os.path.join(self.cache_root, name)
247
# os.mkdir(cache_path)
248
# store = bzrlib.store.CachedStore(store, cache_path)
250
def get_weave(name, prefixed=False):
251
relpath = self._rel_controlfilename(name)
252
ws = WeaveStore(self._transport.clone(relpath), prefixed=prefixed)
253
if self._transport.should_cache():
254
ws.enable_cache = True
257
if self._branch_format == 4:
258
self.inventory_store = get_store('inventory-store')
259
self.text_store = get_store('text-store')
260
self.revision_store = get_store('revision-store')
261
elif self._branch_format == 5:
262
self.control_weaves = get_weave([])
263
self.weave_store = get_weave('weaves')
264
self.revision_store = get_store('revision-store', compressed=False)
265
elif self._branch_format == 6:
266
self.control_weaves = get_weave([])
267
self.weave_store = get_weave('weaves', prefixed=True)
268
self.revision_store = get_store('revision-store', compressed=False,
270
self.revision_store.register_suffix('sig')
271
self._transaction = None
274
return '%s(%r)' % (self.__class__.__name__, self._transport.base)
281
if self._lock_mode or self._lock:
282
# XXX: This should show something every time, and be suitable for
283
# headless operation and embedding
284
warn("branch %r was not explicitly unlocked" % self)
287
# TODO: It might be best to do this somewhere else,
288
# but it is nice for a Branch object to automatically
289
# cache its information.
290
# Alternatively, we could have the Transport objects cache requests
291
# See the earlier discussion about how major objects (like Branch)
292
# should never expect their __del__ function to run.
293
if hasattr(self, 'cache_root') and self.cache_root is not None:
296
shutil.rmtree(self.cache_root)
299
self.cache_root = None
303
return self._transport.base
306
base = property(_get_base, doc="The URL for the root of this branch.")
308
def _finish_transaction(self):
309
"""Exit the current transaction."""
310
if self._transaction is None:
311
raise errors.LockError('Branch %s is not in a transaction' %
313
transaction = self._transaction
314
self._transaction = None
317
def get_transaction(self):
318
"""Return the current active transaction.
320
If no transaction is active, this returns a passthrough object
321
for which all data is immediately flushed and no caching happens.
323
if self._transaction is None:
324
return transactions.PassThroughTransaction()
326
return self._transaction
328
def _set_transaction(self, new_transaction):
329
"""Set a new active transaction."""
330
if self._transaction is not None:
331
raise errors.LockError('Branch %s is in a transaction already.' %
333
self._transaction = new_transaction
335
def lock_write(self):
336
mutter("lock write: %s (%s)", self, self._lock_count)
337
# TODO: Upgrade locking to support using a Transport,
338
# and potentially a remote locking protocol
340
if self._lock_mode != 'w':
341
raise LockError("can't upgrade to a write lock from %r" %
343
self._lock_count += 1
345
self._lock = self._transport.lock_write(
346
self._rel_controlfilename('branch-lock'))
347
self._lock_mode = 'w'
349
self._set_transaction(transactions.PassThroughTransaction())
327
351
def lock_read(self):
328
"""Lock the branch for read operations.
330
:return: A breezy.lock.LogicalLockResult.
332
raise NotImplementedError(self.lock_read)
352
mutter("lock read: %s (%s)", self, self._lock_count)
354
assert self._lock_mode in ('r', 'w'), \
355
"invalid lock mode %r" % self._lock_mode
356
self._lock_count += 1
358
self._lock = self._transport.lock_read(
359
self._rel_controlfilename('branch-lock'))
360
self._lock_mode = 'r'
362
self._set_transaction(transactions.ReadOnlyTransaction())
363
# 5K may be excessive, but hey, its a knob.
364
self.get_transaction().set_cache_size(5000)
334
366
def unlock(self):
335
raise NotImplementedError(self.unlock)
337
def peek_lock_mode(self):
338
"""Return lock mode for the Branch: 'r', 'w' or None"""
339
raise NotImplementedError(self.peek_lock_mode)
341
def get_physical_lock_status(self):
342
raise NotImplementedError(self.get_physical_lock_status)

    def dotted_revno_to_revision_id(self, revno, _cache_reverse=False):
        """Return the revision_id for a dotted revno.

        :param revno: a tuple like (1,) or (1,1,2)
        :param _cache_reverse: a private parameter enabling storage
           of the reverse mapping in a top level cache. (This should
           only be done in selective circumstances as we want to
           avoid having the mapping cached multiple times.)
        :return: the revision_id
        :raises errors.NoSuchRevision: if the revno doesn't exist
        """
        with self.lock_read():
            rev_id = self._do_dotted_revno_to_revision_id(revno)
            if _cache_reverse:
                self._partial_revision_id_to_revno_cache[rev_id] = revno
            return rev_id
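
    def _example_dotted_revno_round_trip(self, revision_id):
        """Illustrative sketch, not part of the original API.

        Demonstrates the relationship between the two mappings: a merged
        revision gets a dotted revno such as (1, 1, 2), while a mainline
        revision gets a one-element tuple such as (10,).  ``revision_id`` is
        assumed to be in this branch's ancestry.
        """
        dotted = self.revision_id_to_dotted_revno(revision_id)
        # Mapping back should return the id we started from.
        return self.dotted_revno_to_revision_id(dotted) == revision_id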
361
def _do_dotted_revno_to_revision_id(self, revno):
362
"""Worker function for dotted_revno_to_revision_id.
364
Subclasses should override this if they wish to
365
provide a more efficient implementation.
367
mutter("unlock: %s (%s)", self, self._lock_count)
368
if not self._lock_mode:
369
raise LockError('branch %r is not locked' % (self))
371
if self._lock_count > 1:
372
self._lock_count -= 1
374
self._finish_transaction()
377
self._lock_mode = self._lock_count = None
379
def abspath(self, name):
380
"""Return absolute filename for something in the branch
382
XXX: Robert Collins 20051017 what is this used for? why is it a branch
383
method and not a tree method.
385
return self._transport.abspath(name)
387
def _rel_controlfilename(self, file_or_path):
388
if isinstance(file_or_path, basestring):
389
file_or_path = [file_or_path]
390
return [bzrlib.BZRDIR] + file_or_path
392
def controlfilename(self, file_or_path):
393
"""Return location relative to branch."""
394
return self._transport.abspath(self._rel_controlfilename(file_or_path))
397
def controlfile(self, file_or_path, mode='r'):
398
"""Open a control file for this branch.
400
There are two classes of file in the control directory: text
401
and binary. binary files are untranslated byte streams. Text
402
control files are stored with Unix newlines and in UTF-8, even
403
if the platform or locale defaults are different.
405
Controlfiles should almost never be opened in write mode but
406
rather should be atomically copied and replaced using atomicfile.
410
relpath = self._rel_controlfilename(file_or_path)
411
#TODO: codecs.open() buffers linewise, so it was overloaded with
412
# a much larger buffer, do we need to do the same for getreader/getwriter?
414
return self._transport.get(relpath)
416
raise BzrError("Branch.controlfile(mode='wb') is not supported, use put_controlfiles")
418
return codecs.getreader('utf-8')(self._transport.get(relpath), errors='replace')
420
raise BzrError("Branch.controlfile(mode='w') is not supported, use put_controlfiles")
422
raise BzrError("invalid controlfile mode %r" % mode)
424
def put_controlfile(self, path, f, encode=True):
425
"""Write an entry as a controlfile.
427
:param path: The path to put the file, relative to the .bzr control
429
:param f: A file-like or string object whose contents should be copied.
430
:param encode: If true, encode the contents as utf-8
432
self.put_controlfiles([(path, f)], encode=encode)
434
def put_controlfiles(self, files, encode=True):
435
"""Write several entries as controlfiles.
437
:param files: A list of [(path, file)] pairs, where the path is the directory
438
underneath the bzr control directory
439
:param encode: If true, encode the contents as utf-8
443
for path, f in files:
445
if isinstance(f, basestring):
446
f = f.encode('utf-8', 'replace')
448
f = codecs.getwriter('utf-8')(f, errors='replace')
449
path = self._rel_controlfilename(path)
450
ctrl_files.append((path, f))
451
self._transport.put_multi(ctrl_files)
453
def _make_control(self):
454
from bzrlib.inventory import Inventory
455
from bzrlib.weavefile import write_weave_v5
456
from bzrlib.weave import Weave
458
# Create an empty inventory
460
# if we want per-tree root ids then this is the place to set
461
# them; they're not needed for now and so omitted for
463
bzrlib.xml5.serializer_v5.write_inventory(Inventory(), sio)
464
empty_inv = sio.getvalue()
466
bzrlib.weavefile.write_weave_v5(Weave(), sio)
467
empty_weave = sio.getvalue()
469
dirs = [[], 'revision-store', 'weaves']
471
"This is a Bazaar-NG control directory.\n"
472
"Do not change any files in this directory.\n"),
473
('branch-format', BZR_BRANCH_FORMAT_6),
474
('revision-history', ''),
477
('pending-merges', ''),
478
('inventory', empty_inv),
479
('inventory.weave', empty_weave),
480
('ancestry.weave', empty_weave)
482
cfn = self._rel_controlfilename
483
self._transport.mkdir_multi([cfn(d) for d in dirs])
484
self.put_controlfiles(files)
485
mutter('created control directory in ' + self._transport.base)
487
def _check_format(self, relax_version_check):
488
"""Check this branch format is supported.
490
The format level is stored, as an integer, in
491
self._branch_format for code that needs to check it later.
493
In the future, we might need different in-memory Branch
494
classes to support downlevel branches. But not yet.
497
fmt = self.controlfile('branch-format', 'r').read()
499
raise NotBranchError(path=self.base)
500
mutter("got branch format %r", fmt)
501
if fmt == BZR_BRANCH_FORMAT_6:
502
self._branch_format = 6
503
elif fmt == BZR_BRANCH_FORMAT_5:
504
self._branch_format = 5
505
elif fmt == BZR_BRANCH_FORMAT_4:
506
self._branch_format = 4
508
if (not relax_version_check
509
and self._branch_format not in (5, 6)):
510
raise errors.UnsupportedFormatError(
511
'sorry, branch format %r not supported' % fmt,
512
['use a different bzr version',
513
'or remove the .bzr directory'
514
' and "bzr init" again'])
516
def get_root_id(self):
517
"""Return the id of this branches root"""
518
inv = self.read_working_inventory()
519
return inv.root.file_id
521
def set_root_id(self, file_id):
522
inv = self.read_working_inventory()
523
orig_root_id = inv.root.file_id
524
del inv._byid[inv.root.file_id]
525
inv.root.file_id = file_id
526
inv._byid[inv.root.file_id] = inv.root
529
if entry.parent_id in (None, orig_root_id):
530
entry.parent_id = inv.root.file_id
531
self._write_inventory(inv)
534
def read_working_inventory(self):
535
"""Read the working inventory."""
536
# ElementTree does its own conversion from UTF-8, so open in
538
f = self.controlfile('inventory', 'rb')
539
return bzrlib.xml5.serializer_v5.read_inventory(f)
542
def _write_inventory(self, inv):
543
"""Update the working inventory.
545
That is to say, the inventory describing changes underway, that
546
will be committed to the next revision.
548
from cStringIO import StringIO
550
bzrlib.xml5.serializer_v5.write_inventory(inv, sio)
552
# Transport handles atomicity
553
self.put_controlfile('inventory', sio)
555
mutter('wrote working inventory')
557
inventory = property(read_working_inventory, _write_inventory, None,
558
"""Inventory for the working copy.""")
561
def add(self, files, ids=None):
562
"""Make files versioned.
564
Note that the command line normally calls smart_add instead,
565
which can automatically recurse.
567
This puts the files in the Added state, so that they will be
568
recorded by the next commit.
571
List of paths to add, relative to the base of the tree.
574
If set, use these instead of automatically generated ids.
575
Must be the same length as the list of files, but may
576
contain None for ids that are to be autogenerated.
578
TODO: Perhaps have an option to add the ids even if the files do
581
TODO: Perhaps yield the ids and paths as they're added.
583
# TODO: Re-adding a file that is removed in the working copy
584
# should probably put it back with the previous ID.
585
if isinstance(files, basestring):
586
assert(ids is None or isinstance(ids, basestring))
592
ids = [None] * len(files)
594
assert(len(ids) == len(files))
596
inv = self.read_working_inventory()
597
for f,file_id in zip(files, ids):
598
if is_control_file(f):
599
raise BzrError("cannot add control file %s" % quotefn(f))
604
raise BzrError("cannot add top-level %r" % f)
606
fullpath = os.path.normpath(self.abspath(f))
369
return self.get_rev_id(revno[0])
370
except errors.RevisionNotPresent as e:
371
raise errors.GhostRevisionsHaveNoRevno(revno[0], e.revision_id)
372
revision_id_to_revno = self.get_revision_id_to_revno_map()
373
revision_ids = [revision_id for revision_id, this_revno
374
in viewitems(revision_id_to_revno)
375
if revno == this_revno]
376
if len(revision_ids) == 1:
377
return revision_ids[0]
379
revno_str = '.'.join(map(str, revno))
380
raise errors.NoSuchRevision(self, revno_str)
382
def revision_id_to_dotted_revno(self, revision_id):
383
"""Given a revision id, return its dotted revno.
385
:return: a tuple like (1,) or (400,1,3).
387
with self.lock_read():
388
return self._do_revision_id_to_dotted_revno(revision_id)
390
def _do_revision_id_to_dotted_revno(self, revision_id):
391
"""Worker function for revision_id_to_revno."""
392
# Try the caches if they are loaded
393
result = self._partial_revision_id_to_revno_cache.get(revision_id)
394
if result is not None:
396
if self._revision_id_to_revno_cache:
397
result = self._revision_id_to_revno_cache.get(revision_id)
399
raise errors.NoSuchRevision(self, revision_id)
400
# Try the mainline as it's optimised
402
revno = self.revision_id_to_revno(revision_id)
404
except errors.NoSuchRevision:
405
# We need to load and use the full revno map after all
406
result = self.get_revision_id_to_revno_map().get(revision_id)
408
raise errors.NoSuchRevision(self, revision_id)
411
def get_revision_id_to_revno_map(self):
412
"""Return the revision_id => dotted revno map.
414
This will be regenerated on demand, but will be cached.
416
:return: A dictionary mapping revision_id => dotted revno.
417
This dictionary should not be modified by the caller.
419
if 'evil' in debug.debug_flags:
421
3, "get_revision_id_to_revno_map scales with ancestry.")
422
with self.lock_read():
423
if self._revision_id_to_revno_cache is not None:
424
mapping = self._revision_id_to_revno_cache
426
mapping = self._gen_revno_map()
427
self._cache_revision_id_to_revno(mapping)
428
# TODO: jam 20070417 Since this is being cached, should we be
430
# I would rather not, and instead just declare that users should
431
# not modify the return value.
434
def _gen_revno_map(self):
435
"""Create a new mapping from revision ids to dotted revnos.
437
Dotted revnos are generated based on the current tip in the revision
439
This is the worker function for get_revision_id_to_revno_map, which
440
just caches the return value.
442
:return: A dictionary mapping revision_id => dotted revno.
444
revision_id_to_revno = {
445
rev_id: revno for rev_id, depth, revno, end_of_merge
446
in self.iter_merge_sorted_revisions()}
447
return revision_id_to_revno
449
def iter_merge_sorted_revisions(self, start_revision_id=None,
450
stop_revision_id=None,
451
stop_rule='exclude', direction='reverse'):
452
"""Walk the revisions for a branch in merge sorted order.
454
Merge sorted order is the output from a merge-aware,
455
topological sort, i.e. all parents come before their
456
children going forward; the opposite for reverse.
458
:param start_revision_id: the revision_id to begin walking from.
459
If None, the branch tip is used.
460
:param stop_revision_id: the revision_id to terminate the walk
461
after. If None, the rest of history is included.
462
:param stop_rule: if stop_revision_id is not None, the precise rule
463
to use for termination:
465
* 'exclude' - leave the stop revision out of the result (default)
466
* 'include' - the stop revision is the last item in the result
467
* 'with-merges' - include the stop revision and all of its
468
merged revisions in the result
469
* 'with-merges-without-common-ancestry' - filter out revisions
470
that are in both ancestries
471
:param direction: either 'reverse' or 'forward':
473
* reverse means return the start_revision_id first, i.e.
474
start at the most recent revision and go backwards in history
475
* forward returns tuples in the opposite order to reverse.
476
Note in particular that forward does *not* do any intelligent
477
ordering w.r.t. depth as some clients of this API may like.
478
(If required, that ought to be done at higher layers.)
480
:return: an iterator over (revision_id, depth, revno, end_of_merge)
483
* revision_id: the unique id of the revision
484
* depth: How many levels of merging deep this node has been
486
* revno_sequence: This field provides a sequence of
487
revision numbers for all revisions. The format is:
488
(REVNO, BRANCHNUM, BRANCHREVNO). BRANCHNUM is the number of the
489
branch that the revno is on. From left to right the REVNO numbers
490
are the sequence numbers within that branch of the revision.
491
* end_of_merge: When True the next node (earlier in history) is
492
part of a different merge.
494
with self.lock_read():
495
# Note: depth and revno values are in the context of the branch so
496
# we need the full graph to get stable numbers, regardless of the
498
if self._merge_sorted_revisions_cache is None:
499
last_revision = self.last_revision()
500
known_graph = self.repository.get_known_graph_ancestry(
502
self._merge_sorted_revisions_cache = known_graph.merge_sort(
504
filtered = self._filter_merge_sorted_revisions(
505
self._merge_sorted_revisions_cache, start_revision_id,
506
stop_revision_id, stop_rule)
507
# Make sure we don't return revisions that are not part of the
508
# start_revision_id ancestry.
509
filtered = self._filter_start_non_ancestors(filtered)
510
if direction == 'reverse':
512
if direction == 'forward':
513
return reversed(list(filtered))
515
raise ValueError('invalid direction %r' % direction)
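
    def _example_log_tuples(self, limit=10):
        """Illustrative sketch, not part of the original API.

        Builds a minimal log-like listing from iter_merge_sorted_revisions(),
        formatting the dotted revno tuple and indenting by merge depth; the
        ``limit`` cut-off is arbitrary.
        """
        lines = []
        for revision_id, depth, revno, end_of_merge in \
                self.iter_merge_sorted_revisions():
            revno_str = '.'.join(str(n) for n in revno)
            lines.append('%s%s %s' % ('  ' * depth, revno_str, revision_id))
            if len(lines) >= limit:
                break
        return lines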
517
def _filter_merge_sorted_revisions(self, merge_sorted_revisions,
518
start_revision_id, stop_revision_id,
520
"""Iterate over an inclusive range of sorted revisions."""
521
rev_iter = iter(merge_sorted_revisions)
522
if start_revision_id is not None:
523
for node in rev_iter:
525
if rev_id != start_revision_id:
528
# The decision to include the start or not
529
# depends on the stop_rule if a stop is provided
530
# so pop this node back into the iterator
531
rev_iter = itertools.chain(iter([node]), rev_iter)
533
if stop_revision_id is None:
535
for node in rev_iter:
537
yield (rev_id, node.merge_depth, node.revno,
539
elif stop_rule == 'exclude':
540
for node in rev_iter:
542
if rev_id == stop_revision_id:
544
yield (rev_id, node.merge_depth, node.revno,
546
elif stop_rule == 'include':
547
for node in rev_iter:
549
yield (rev_id, node.merge_depth, node.revno,
551
if rev_id == stop_revision_id:
553
elif stop_rule == 'with-merges-without-common-ancestry':
554
# We want to exclude all revisions that are already part of the
555
# stop_revision_id ancestry.
556
graph = self.repository.get_graph()
557
ancestors = graph.find_unique_ancestors(start_revision_id,
559
for node in rev_iter:
561
if rev_id not in ancestors:
563
yield (rev_id, node.merge_depth, node.revno,
565
elif stop_rule == 'with-merges':
566
stop_rev = self.repository.get_revision(stop_revision_id)
567
if stop_rev.parent_ids:
568
left_parent = stop_rev.parent_ids[0]
570
left_parent = _mod_revision.NULL_REVISION
571
# left_parent is the actual revision we want to stop logging at,
572
# since we want to show the merged revisions after the stop_rev too
573
reached_stop_revision_id = False
574
revision_id_whitelist = []
575
for node in rev_iter:
577
if rev_id == left_parent:
578
# reached the left parent after the stop_revision
580
if (not reached_stop_revision_id
581
or rev_id in revision_id_whitelist):
582
yield (rev_id, node.merge_depth, node.revno,
584
if reached_stop_revision_id or rev_id == stop_revision_id:
585
# only do the merged revs of rev_id from now on
586
rev = self.repository.get_revision(rev_id)
588
reached_stop_revision_id = True
589
revision_id_whitelist.extend(rev.parent_ids)
591
raise ValueError('invalid stop_rule %r' % stop_rule)
593
def _filter_start_non_ancestors(self, rev_iter):
594
# If we started from a dotted revno, we want to consider it as a tip
595
# and don't want to yield revisions that are not part of its
596
# ancestry. Given the order guaranteed by the merge sort, we will see
597
# uninteresting descendants of the first parent of our tip before the
600
first = next(rev_iter)
601
except StopIteration:
603
(rev_id, merge_depth, revno, end_of_merge) = first
606
# We start at a mainline revision so by definition, all other
607
# revisions in rev_iter are ancestors
608
for node in rev_iter:
613
pmap = self.repository.get_parent_map([rev_id])
614
parents = pmap.get(rev_id, [])
616
whitelist.update(parents)
618
# If there are no parents, there is nothing of interest left
620
# FIXME: It's hard to test this scenario here as this code is never
621
# called in that case. -- vila 20100322
624
for (rev_id, merge_depth, revno, end_of_merge) in rev_iter:
626
if rev_id in whitelist:
627
pmap = self.repository.get_parent_map([rev_id])
628
parents = pmap.get(rev_id, [])
629
whitelist.remove(rev_id)
630
whitelist.update(parents)
632
# We've reached the mainline, there is nothing left to
636
# A revision that is not part of the ancestry of our
639
yield (rev_id, merge_depth, revno, end_of_merge)
641
def leave_lock_in_place(self):
642
"""Tell this branch object not to release the physical lock when this
645
If lock_write doesn't return a token, then this method is not
648
self.control_files.leave_in_place()
650
def dont_leave_lock_in_place(self):
651
"""Tell this branch object to release the physical lock when this
652
object is unlocked, even if it didn't originally acquire it.
654
If lock_write doesn't return a token, then this method is not
657
self.control_files.dont_leave_in_place()
659
def bind(self, other):
660
"""Bind the local branch the other branch.
662
:param other: The branch to bind to
665
raise errors.UpgradeRequired(self.user_url)
667
def get_append_revisions_only(self):
668
"""Whether it is only possible to append revisions to the history.
670
if not self._format.supports_set_append_revisions_only():
672
return self.get_config_stack().get('append_revisions_only')
674
def set_append_revisions_only(self, enabled):
675
if not self._format.supports_set_append_revisions_only():
676
raise errors.UpgradeRequired(self.user_url)
677
self.get_config_stack().set('append_revisions_only', enabled)
679
def fetch(self, from_branch, stop_revision=None, limit=None, lossy=False):
680
"""Copy revisions from from_branch into this branch.
682
:param from_branch: Where to copy from.
683
:param stop_revision: What revision to stop at (None for at the end
685
:param limit: Optional rough limit of revisions to fetch
688
with self.lock_write():
689
return InterBranch.get(from_branch, self).fetch(
690
stop_revision, limit=limit, lossy=lossy)
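
    def _example_mirror_from(self, source_url):
        """Illustrative sketch, not part of the original API.

        Copies all revisions reachable from the tip of another branch into
        this branch's repository via fetch(); ``source_url`` is assumed to
        name an existing branch.
        """
        other = Branch.open(source_url)
        with self.lock_write(), other.lock_read():
            self.fetch(other, stop_revision=other.last_revision())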
692
def get_bound_location(self):
693
"""Return the URL of the branch we are bound to.
695
Older format branches cannot bind, please be sure to use a metadir
700
def get_old_bound_location(self):
701
"""Return the URL of the branch we used to be bound to
703
raise errors.UpgradeRequired(self.user_url)
705
def get_commit_builder(self, parents, config_stack=None, timestamp=None,
706
timezone=None, committer=None, revprops=None,
707
revision_id=None, lossy=False):
708
"""Obtain a CommitBuilder for this branch.
710
:param parents: Revision ids of the parents of the new revision.
711
:param config_stack: Optional configuration to use.
712
:param timestamp: Optional timestamp recorded for commit.
713
:param timezone: Optional timezone for timestamp.
714
:param committer: Optional committer to set for commit.
715
:param revprops: Optional dictionary of revision properties.
716
:param revision_id: Optional revision id.
717
:param lossy: Whether to discard data that can not be natively
718
represented, when pushing to a foreign VCS
721
if config_stack is None:
722
config_stack = self.get_config_stack()
724
return self.repository.get_commit_builder(
725
self, parents, config_stack, timestamp, timezone, committer,
726
revprops, revision_id, lossy)
728
def get_master_branch(self, possible_transports=None):
729
"""Return the branch we are bound to.
731
:return: Either a Branch, or None
735
def get_stacked_on_url(self):
736
"""Get the URL this branch is stacked against.
738
:raises NotStacked: If the branch is not stacked.
739
:raises UnstackableBranchFormat: If the branch does not support
742
raise NotImplementedError(self.get_stacked_on_url)
744
def set_last_revision_info(self, revno, revision_id):
745
"""Set the last revision of this branch.
747
The caller is responsible for checking that the revno is correct
748
for this revision id.
750
It may be possible to set the branch last revision to an id not
751
present in the repository. However, branches can also be
752
configured to check constraints on history, in which case this may not
755
raise NotImplementedError(self.set_last_revision_info)
757
def generate_revision_history(self, revision_id, last_rev=None,
759
"""See Branch.generate_revision_history"""
760
with self.lock_write():
761
graph = self.repository.get_graph()
762
(last_revno, last_revid) = self.last_revision_info()
763
known_revision_ids = [
764
(last_revid, last_revno),
765
(_mod_revision.NULL_REVISION, 0),
767
if last_rev is not None:
768
if not graph.is_ancestor(last_rev, revision_id):
769
# our previous tip is not merged into stop_revision
770
raise errors.DivergedBranches(self, other_branch)
771
revno = graph.find_distance_to_null(
772
revision_id, known_revision_ids)
773
self.set_last_revision_info(revno, revision_id)
775
def set_parent(self, url):
776
"""See Branch.set_parent."""
777
# TODO: Maybe delete old location files?
778
# URLs should never be unicode, even on the local fs,
779
# FIXUP this and get_parent in a future branch format bump:
780
# read and rewrite the file. RBC 20060125
782
if isinstance(url, text_type):
785
except UnicodeEncodeError:
786
raise urlutils.InvalidURL(
787
url, "Urls must be 7-bit ascii, "
788
"use breezy.urlutils.escape")
789
url = urlutils.relative_url(self.base, url)
790
with self.lock_write():
791
self._set_parent_location(url)
793
def set_stacked_on_url(self, url):
794
"""Set the URL this branch is stacked against.
796
:raises UnstackableBranchFormat: If the branch does not support
798
:raises UnstackableRepositoryFormat: If the repository does not support
801
if not self._format.supports_stacking():
802
raise UnstackableBranchFormat(self._format, self.user_url)
803
with self.lock_write():
804
# XXX: Changing from one fallback repository to another does not
805
# check that all the data you need is present in the new fallback.
806
# Possibly it should.
807
self._check_stackable_repo()
810
self.get_stacked_on_url()
811
except (errors.NotStacked, UnstackableBranchFormat,
812
errors.UnstackableRepositoryFormat):
816
self._activate_fallback_location(
817
url, possible_transports=[self.controldir.root_transport])
818
# write this out after the repository is stacked to avoid setting a
819
# stacked config that doesn't work.
820
self._set_config_location('stacked_on_location', url)
823
"""Change a branch to be unstacked, copying data as needed.
825
Don't call this directly, use set_stacked_on_url(None).
827
with ui.ui_factory.nested_progress_bar() as pb:
828
pb.update(gettext("Unstacking"))
829
# The basic approach here is to fetch the tip of the branch,
830
# including all available ghosts, from the existing stacked
831
# repository into a new repository object without the fallbacks.
833
# XXX: See <https://launchpad.net/bugs/397286> - this may not be
834
# correct for CHKMap repositories
835
old_repository = self.repository
836
if len(old_repository._fallback_repositories) != 1:
837
raise AssertionError(
838
"can't cope with fallback repositories "
839
"of %r (fallbacks: %r)" % (
840
old_repository, old_repository._fallback_repositories))
841
# Open the new repository object.
842
# Repositories don't offer an interface to remove fallback
843
# repositories today; take the conceptually simpler option and just
844
# reopen it. We reopen it starting from the URL so that we
845
# get a separate connection for RemoteRepositories and can
846
# stream from one of them to the other. This does mean doing
847
# separate SSH connection setup, but unstacking is not a
848
# common operation so it's tolerable.
849
new_bzrdir = controldir.ControlDir.open(
850
self.controldir.root_transport.base)
851
new_repository = new_bzrdir.find_repository()
852
if new_repository._fallback_repositories:
853
raise AssertionError(
854
"didn't expect %r to have fallback_repositories"
855
% (self.repository,))
856
# Replace self.repository with the new repository.
857
# Do our best to transfer the lock state (i.e. lock-tokens and
858
# lock count) of self.repository to the new repository.
859
lock_token = old_repository.lock_write().repository_token
860
self.repository = new_repository
861
if isinstance(self, remote.RemoteBranch):
862
# Remote branches can have a second reference to the old
863
# repository that need to be replaced.
864
if self._real_branch is not None:
865
self._real_branch.repository = new_repository
866
self.repository.lock_write(token=lock_token)
867
if lock_token is not None:
868
old_repository.leave_lock_in_place()
869
old_repository.unlock()
870
if lock_token is not None:
871
# XXX: self.repository.leave_lock_in_place() before this
872
# function will not be preserved. Fortunately that doesn't
873
# affect the current default format (2a), and would be a
874
# corner-case anyway.
875
# - Andrew Bennetts, 2010/06/30
876
self.repository.dont_leave_lock_in_place()
880
old_repository.unlock()
881
except errors.LockNotHeld:
884
if old_lock_count == 0:
885
raise AssertionError(
886
'old_repository should have been locked at least once.')
887
for i in range(old_lock_count - 1):
888
self.repository.lock_write()
889
# Fetch from the old repository into the new.
890
with old_repository.lock_read():
891
# XXX: If you unstack a branch while it has a working tree
892
# with a pending merge, the pending-merged revisions will no
893
# longer be present. You can (probably) revert and remerge.
895
tags_to_fetch = set(self.tags.get_reverse_tag_dict())
896
except errors.TagsNotSupported:
897
tags_to_fetch = set()
898
fetch_spec = vf_search.NotInOtherForRevs(
899
self.repository, old_repository,
900
required_ids=[self.last_revision()],
901
if_present_ids=tags_to_fetch, find_ghosts=True).execute()
902
self.repository.fetch(old_repository, fetch_spec=fetch_spec)
904
def _cache_revision_history(self, rev_history):
905
"""Set the cached revision history to rev_history.
907
The revision_history method will use this cache to avoid regenerating
908
the revision history.
910
This API is semi-public; it is only for use by subclasses, all other code
911
should consider it to be private.
913
self._revision_history_cache = rev_history
915
def _cache_revision_id_to_revno(self, revision_id_to_revno):
916
"""Set the cached revision_id => revno map to revision_id_to_revno.
918
This API is semi-public; it is only for use by subclasses, all other code
919
should consider it to be private.
921
self._revision_id_to_revno_cache = revision_id_to_revno
923
def _clear_cached_state(self):
924
"""Clear any cached data on this branch, e.g. cached revision history.
926
This means the next call to revision_history will need to call
927
_gen_revision_history.
929
This API is semi-public; it is only for use by subclasses, all other
930
code should consider it to be private.
932
self._revision_history_cache = None
933
self._revision_id_to_revno_cache = None
934
self._last_revision_info_cache = None
935
self._master_branch_cache = None
936
self._merge_sorted_revisions_cache = None
937
self._partial_revision_history_cache = []
938
self._partial_revision_id_to_revno_cache = {}
940
def _gen_revision_history(self):
941
"""Return sequence of revision hashes on to this branch.
943
Unlike revision_history, this method always regenerates or rereads the
944
revision history, i.e. it does not cache the result, so repeated calls
947
Concrete subclasses should override this instead of revision_history so
948
that subclasses do not need to deal with caching logic.
950
This API is semi-public; it is only for use by subclasses, all other code
951
should consider it to be private.
953
raise NotImplementedError(self._gen_revision_history)
955
def _revision_history(self):
956
if 'evil' in debug.debug_flags:
957
mutter_callsite(3, "revision_history scales with history.")
958
if self._revision_history_cache is not None:
959
history = self._revision_history_cache
961
history = self._gen_revision_history()
962
self._cache_revision_history(history)
609
kind = file_kind(fullpath)
611
# maybe something better?
612
raise BzrError('cannot add: not a regular file, symlink or directory: %s' % quotefn(f))
614
if not InventoryEntry.versionable_kind(kind):
615
raise BzrError('cannot add: not a versionable file ('
616
'i.e. regular file, symlink or directory): %s' % quotefn(f))
619
file_id = gen_file_id(f)
620
inv.add_path(f, kind=kind, file_id=file_id)
622
mutter("add file %s file_id:{%s} kind=%r" % (f, file_id, kind))
624
self._write_inventory(inv)
627
def print_file(self, file, revno):
628
"""Print `file` to stdout."""
629
tree = self.revision_tree(self.get_rev_id(revno))
630
# use inventory as it was in that revision
631
file_id = tree.inventory.path2id(file)
633
raise BzrError("%r is not present in revision %s" % (file, revno))
634
tree.print_file(file_id)
636
# FIXME: this doesn't need to be a branch method
637
def set_inventory(self, new_inventory_list):
638
from bzrlib.inventory import Inventory, InventoryEntry
639
inv = Inventory(self.get_root_id())
640
for path, file_id, parent, kind in new_inventory_list:
641
name = os.path.basename(path)
644
# fixme, there should be a factory function inv,add_??
645
if kind == 'directory':
646
inv.add(inventory.InventoryDirectory(file_id, name, parent))
648
inv.add(inventory.InventoryFile(file_id, name, parent))
649
elif kind == 'symlink':
650
inv.add(inventory.InventoryLink(file_id, name, parent))
652
raise BzrError("unknown kind %r" % kind)
653
self._write_inventory(inv)
656
"""Return all unknown files.
658
These are files in the working directory that are not versioned or
659
control files or ignored.
661
>>> from bzrlib.workingtree import WorkingTree
662
>>> b = ScratchBranch(files=['foo', 'foo~'])
663
>>> map(str, b.unknowns())
666
>>> list(b.unknowns())
668
>>> WorkingTree(b.base, b).remove('foo')
669
>>> list(b.unknowns())
672
return self.working_tree().unknowns()
675
def append_revision(self, *revision_ids):
676
for revision_id in revision_ids:
677
mutter("add {%s} to revision-history" % revision_id)
678
rev_history = self.revision_history()
679
rev_history.extend(revision_ids)
680
self.set_revision_history(rev_history)
683
def set_revision_history(self, rev_history):
684
self.put_controlfile('revision-history', '\n'.join(rev_history))
686
def has_revision(self, revision_id):
687
"""True if this branch has a copy of the revision.
689
This does not necessarily imply the revision is merged
690
or on the mainline."""
691
return (revision_id is None
692
or self.revision_store.has_id(revision_id))
695
def get_revision_xml_file(self, revision_id):
696
"""Return XML file object for revision object."""
697
if not revision_id or not isinstance(revision_id, basestring):
698
raise InvalidRevisionId(revision_id)
700
return self.revision_store.get(revision_id)
701
except (IndexError, KeyError):
702
raise bzrlib.errors.NoSuchRevision(self, revision_id)
705
get_revision_xml = get_revision_xml_file
707
def get_revision_xml(self, revision_id):
708
return self.get_revision_xml_file(revision_id).read()
711
def get_revision(self, revision_id):
712
"""Return the Revision object for a named revision"""
713
xml_file = self.get_revision_xml_file(revision_id)
716
r = bzrlib.xml5.serializer_v5.read_revision(xml_file)
717
except SyntaxError, e:
718
raise bzrlib.errors.BzrError('failed to unpack revision_xml',
722
assert r.revision_id == revision_id
725
def get_revision_delta(self, revno):
726
"""Return the delta for one revision.
728
The delta is relative to its mainline predecessor, or the
729
empty tree for revision 1.
731
assert isinstance(revno, int)
732
rh = self.revision_history()
733
if not (1 <= revno <= len(rh)):
734
raise InvalidRevisionNumber(revno)
736
# revno is 1-based; list is 0-based
738
new_tree = self.revision_tree(rh[revno-1])
740
old_tree = EmptyTree()
742
old_tree = self.revision_tree(rh[revno-2])
744
return compare_trees(old_tree, new_tree)
746
def get_revision_sha1(self, revision_id):
747
"""Hash the stored value of a revision, and return it."""
748
# In the future, revision entries will be signed. At that
749
# point, it is probably best *not* to include the signature
750
# in the revision hash. Because that lets you re-sign
751
# the revision, (add signatures/remove signatures) and still
752
# have all hash pointers stay consistent.
753
# But for now, just hash the contents.
754
return bzrlib.osutils.sha_file(self.get_revision_xml_file(revision_id))
756
def get_ancestry(self, revision_id):
757
"""Return a list of revision-ids integrated by a revision.
759
This currently returns a list, but the ordering is not guaranteed:
762
if revision_id is None:
764
w = self.get_inventory_weave()
765
return [None] + map(w.idx_to_name,
766
w.inclusions([w.lookup(revision_id)]))
768
def get_inventory_weave(self):
769
return self.control_weaves.get_weave('inventory',
770
self.get_transaction())
772
def get_inventory(self, revision_id):
773
"""Get Inventory object by hash."""
774
xml = self.get_inventory_xml(revision_id)
775
return bzrlib.xml5.serializer_v5.read_inventory_from_string(xml)
777
def get_inventory_xml(self, revision_id):
778
"""Get inventory XML as a file object."""
780
assert isinstance(revision_id, basestring), type(revision_id)
781
iw = self.get_inventory_weave()
782
return iw.get_text(iw.lookup(revision_id))
784
raise bzrlib.errors.HistoryMissing(self, 'inventory', revision_id)
786
def get_inventory_sha1(self, revision_id):
787
"""Return the sha1 hash of the inventory entry
789
return self.get_revision(revision_id).inventory_sha1
791
def get_revision_inventory(self, revision_id):
792
"""Return inventory of a past revision."""
793
# TODO: Unify this with get_inventory()
794
# bzr 0.0.6 and later imposes the constraint that the inventory_id
795
# must be the same as its revision, so this is trivial.
796
if revision_id == None:
797
return Inventory(self.get_root_id())
799
return self.get_inventory(revision_id)
802
def revision_history(self):
803
"""Return sequence of revision hashes on to this branch."""
804
transaction = self.get_transaction()
805
history = transaction.map.find_revision_history()
806
if history is not None:
807
mutter("cache hit for revision-history in %s", self)
809
history = [l.rstrip('\r\n') for l in
810
self.controlfile('revision-history', 'r').readlines()]
811
transaction.map.add_revision_history(history)
812
# this call is disabled because revision_history is
813
# not really an object yet, and the transaction is for objects.
814
# transaction.register_clean(history, precious=True)
963
815
return list(history)
968
820
That is equivalent to the number of revisions committed to
971
return self.last_revision_info()[0]
974
"""Older format branches cannot bind or unbind."""
975
raise errors.UpgradeRequired(self.user_url)
823
return len(self.revision_history())

    def last_revision(self):
        """Return last revision id, or NULL_REVISION."""
        return self.last_revision_info()[1]

    def last_revision_info(self):
        """Return information about the last revision.

        :return: A tuple (revno, revision_id).
        """
        with self.lock_read():
            if self._last_revision_info_cache is None:
                self._last_revision_info_cache = (
                    self._read_last_revision_info())
            return self._last_revision_info_cache

    def _read_last_revision_info(self):
        raise NotImplementedError(self._read_last_revision_info)
995
    def import_last_revision_info_and_tags(self, source, revno, revid,
                                           lossy=False):
        """Set the last revision info, importing from another repo if necessary.

        This is used by the bound branch code to upload a revision to
        the master branch first before updating the tip of the local branch.
        Revisions referenced by source's tags are also transferred.

        :param source: Source branch to optionally fetch from
        :param revno: Revision number of the new tip
        :param revid: Revision id of the new tip
        :param lossy: Whether to discard metadata that can not be
            natively represented
        :return: Tuple with the new revision number and revision id
            (should only be different from the arguments when lossy=True)
        """
        if not self.repository.has_same_location(source.repository):
            self.fetch(source, revid)
        self.set_last_revision_info(revno, revid)
        return (revno, revid)
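    # Illustrative sketch (not part of the original module): how a bound
    # branch might push its tip to the master before updating itself.  The
    # names `local` and `master` are assumed example branches, not API.
    #
    #   revno, revid = local.last_revision_info()
    #   new_revno, new_revid = master.import_last_revision_info_and_tags(
    #       local, revno, revid)
    #   # With lossy=True the master may rewrite the revision id, so the
    #   # returned (new_revno, new_revid) should be used for the local tip.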
    def missing_revisions(self, other, stop_revision=None, diverged_ok=False):
        """Return a list of new revisions that would perfectly fit.

        If self and other have not diverged, return a list of the revisions
        present in other, but missing from self.

        >>> from bzrlib.commit import commit
        >>> bzrlib.trace.silent = True
        >>> br1 = ScratchBranch()
        >>> br2 = ScratchBranch()
        >>> br1.missing_revisions(br2)
        []
        >>> commit(br2, "lala!", rev_id="REVISION-ID-1")
        >>> br1.missing_revisions(br2)
        [u'REVISION-ID-1']
        >>> br2.missing_revisions(br1)
        []
        >>> commit(br1, "lala!", rev_id="REVISION-ID-1")
        >>> br1.missing_revisions(br2)
        []
        >>> commit(br2, "lala!", rev_id="REVISION-ID-2A")
        >>> br1.missing_revisions(br2)
        [u'REVISION-ID-2A']
        >>> commit(br1, "lala!", rev_id="REVISION-ID-2B")
        >>> br1.missing_revisions(br2)
        Traceback (most recent call last):
        DivergedBranches: These branches have diverged.
        """
        self_history = self.revision_history()
        self_len = len(self_history)
        other_history = other.revision_history()
        other_len = len(other_history)
        common_index = min(self_len, other_len) - 1
        if common_index >= 0 and \
            self_history[common_index] != other_history[common_index]:
            raise DivergedBranches(self, other)

        if stop_revision is None:
            stop_revision = other_len
        else:
            assert isinstance(stop_revision, int)
            if stop_revision > other_len:
                raise bzrlib.errors.NoSuchRevision(self, stop_revision)
        return other_history[self_len:stop_revision]
    def update_revisions(self, other, stop_revision=None):
        """Pull in new perfect-fit revisions."""
        # FIXME: If the branches have diverged, but the latest
        # revision in this branch is completely merged into the other,
        # then we should still be able to pull.
        from bzrlib.fetch import greedy_fetch
        if stop_revision is None:
            stop_revision = other.last_revision()
        ### Should this be checking is_ancestor instead of revision_history?
        if (stop_revision is not None and
            stop_revision in self.revision_history()):
            return
        greedy_fetch(to_branch=self, from_branch=other,
                     revision=stop_revision)
        pullable_revs = self.pullable_revisions(other, stop_revision)
        if len(pullable_revs) > 0:
            self.append_revision(*pullable_revs)

    def pullable_revisions(self, other, stop_revision):
        other_revno = other.revision_id_to_revno(stop_revision)
        try:
            return self.missing_revisions(other, other_revno)
        except DivergedBranches, e:
            try:
                pullable_revs = get_intervening_revisions(self.last_revision(),
                                                          stop_revision, self)
                assert self.last_revision() not in pullable_revs
                return pullable_revs
            except bzrlib.errors.NotAncestor:
                if is_ancestor(self.last_revision(), stop_revision, self):
                    return []
                else:
                    raise e

    def commit(self, *args, **kw):
        from bzrlib.commit import Commit
        Commit().commit(self, *args, **kw)
    def revision_id_to_revno(self, revision_id):
        """Given a revision id, return its revno."""
        if _mod_revision.is_null(revision_id):
            return 0
        history = self._revision_history()
        try:
            return history.index(revision_id) + 1
        except ValueError:
            raise errors.NoSuchRevision(self, revision_id)
    def get_rev_id(self, revno, history=None):
        """Find the revision id of the specified revno."""
        with self.lock_read():
            if revno == 0:
                return _mod_revision.NULL_REVISION
            last_revno, last_revid = self.last_revision_info()
            if revno == last_revno:
                return last_revid
            if revno <= 0 or revno > last_revno:
                raise errors.NoSuchRevision(self, revno)
            distance_from_last = last_revno - revno
            if len(self._partial_revision_history_cache) <= distance_from_last:
                self._extend_partial_history(distance_from_last)
            return self._partial_revision_history_cache[distance_from_last]

    def pull(self, source, overwrite=False, stop_revision=None,
             possible_transports=None, *args, **kwargs):
        """Mirror source into this branch.

        This branch is considered to be 'local', having low latency.

        :returns: PullResult instance
        """
        return InterBranch.get(source, self).pull(
            overwrite=overwrite, stop_revision=stop_revision,
            possible_transports=possible_transports, *args, **kwargs)

    def push(self, target, overwrite=False, stop_revision=None, lossy=False,
             *args, **kwargs):
        """Mirror this branch into target.

        This branch is considered to be 'local', having low latency.
        """
        return InterBranch.get(self, target).push(
            overwrite, stop_revision, lossy, *args, **kwargs)
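    # Illustrative sketch (not part of the original module): mirroring one
    # branch into another through the public API.  `Branch.open` and the
    # URLs below are assumptions for the example only.
    #
    #   src = Branch.open('http://example.com/src')
    #   dst = Branch.open('/srv/mirrors/dst')
    #   result = dst.pull(src)        # delegates to InterBranch.get(src, dst)
    #   src.push(dst)                 # delegates to InterBranch.get(src, dst)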
    def revision_tree(self, revision_id):
        """Return Tree for a revision on this branch.

        `revision_id` may be None for the null revision, in which case
        an `EmptyTree` is returned."""
        # TODO: refactor this to use an existing revision object
        # so we don't need to read it in twice.
        if revision_id is None:
            return EmptyTree()
        else:
            inv = self.get_revision_inventory(revision_id)
            return RevisionTree(self.weave_store, inv, revision_id)

    def working_tree(self):
        """Return a `Tree` for the working copy."""
        from bzrlib.workingtree import WorkingTree
        # TODO: In the future, perhaps WorkingTree should utilize Transport
        # RobertCollins 20051003 - I don't think it should - working trees are
        # much more complex to keep consistent than our careful .bzr subset.
        # instead, we should say that working trees are local only, and
        # optimise for that.
        return WorkingTree(self.base, branch=self)
    def basis_tree(self):
        """Return `Tree` object for last revision.

        If there are no revisions yet, return an `EmptyTree`.
        """
        return self.revision_tree(self.last_revision())
    def rename_one(self, from_rel, to_rel):
        """Rename one file.

        This can change the directory or the filename or both.
        """
        tree = self.working_tree()
        inv = tree.inventory
        if not tree.has_filename(from_rel):
            raise BzrError("can't rename: old working file %r does not exist" % from_rel)
        if tree.has_filename(to_rel):
            raise BzrError("can't rename: new working file %r already exists" % to_rel)

        file_id = inv.path2id(from_rel)
        if file_id is None:
            raise BzrError("can't rename: old name %r is not versioned" % from_rel)

        if inv.path2id(to_rel):
            raise BzrError("can't rename: new name %r is already versioned" % to_rel)

        to_dir, to_tail = os.path.split(to_rel)
        to_dir_id = inv.path2id(to_dir)
        if to_dir_id is None and to_dir != '':
            raise BzrError("can't determine destination directory id for %r" % to_dir)

        mutter("rename_one:")
        mutter("  file_id    {%s}" % file_id)
        mutter("  from_rel   %r" % from_rel)
        mutter("  to_rel     %r" % to_rel)
        mutter("  to_dir     %r" % to_dir)
        mutter("  to_dir_id  {%s}" % to_dir_id)

        inv.rename(file_id, to_dir_id, to_tail)

        from_abs = self.abspath(from_rel)
        to_abs = self.abspath(to_rel)
        try:
            rename(from_abs, to_abs)
        except OSError, e:
            raise BzrError("failed to rename %r to %r: %s"
                    % (from_abs, to_abs, e[1]),
                    ["rename rolled back"])

        self._write_inventory(inv)
    def move(self, from_paths, to_name):
        """Rename files.

        to_name must exist as a versioned directory.

        If to_name exists and is a directory, the files are moved into
        it, keeping their old names.

        Note that to_name is only the last component of the new name;
        this doesn't change the directory.

        This returns a list of (from_path, to_path) pairs for each
        entry that is moved.
        """
        result = []
        ## TODO: Option to move IDs only
        assert not isinstance(from_paths, basestring)
        tree = self.working_tree()
        inv = tree.inventory
        to_abs = self.abspath(to_name)
        if not isdir(to_abs):
            raise BzrError("destination %r is not a directory" % to_abs)
        if not tree.has_filename(to_name):
            raise BzrError("destination %r not in working directory" % to_abs)
        to_dir_id = inv.path2id(to_name)
        if to_dir_id is None and to_name != '':
            raise BzrError("destination %r is not a versioned directory" % to_name)
        to_dir_ie = inv[to_dir_id]
        if to_dir_ie.kind not in ('directory', 'root_directory'):
            raise BzrError("destination %r is not a directory" % to_abs)

        to_idpath = inv.get_idpath(to_dir_id)

        for f in from_paths:
            if not tree.has_filename(f):
                raise BzrError("%r does not exist in working tree" % f)
            f_id = inv.path2id(f)
            if f_id is None:
                raise BzrError("%r is not versioned" % f)
            name_tail = splitpath(f)[-1]
            dest_path = appendpath(to_name, name_tail)
            if tree.has_filename(dest_path):
                raise BzrError("destination %r already exists" % dest_path)
            if f_id in to_idpath:
                raise BzrError("can't move %r to a subdirectory of itself" % f)

        # OK, so there's a race here, it's possible that someone will
        # create a file in this interval and then the rename might be
        # left half-done.  But we should have caught most problems.

        for f in from_paths:
            name_tail = splitpath(f)[-1]
            dest_path = appendpath(to_name, name_tail)
            result.append((f, dest_path))
            inv.rename(inv.path2id(f), to_dir_id, name_tail)
            try:
                rename(self.abspath(f), self.abspath(dest_path))
            except OSError, e:
                raise BzrError("failed to rename %r to %r: %s" % (f, dest_path, e[1]),
                        ["rename rolled back"])

        self._write_inventory(inv)
        return result
    def revert(self, filenames, old_tree=None, backups=True):
        """Restore selected files to the versions from a previous tree.

        backups
            If true (default) backups are made of files before
            they're renamed.
        """
        from bzrlib.atomicfile import AtomicFile
        from bzrlib.osutils import backup_file

        inv = self.read_working_inventory()
        if old_tree is None:
            old_tree = self.basis_tree()
        old_inv = old_tree.inventory

        nids = []
        for fn in filenames:
            file_id = inv.path2id(fn)
            if not file_id:
                raise NotVersionedError(path=fn)
            if not old_inv.has_id(file_id):
                raise BzrError("file not present in old tree", fn, file_id)
            nids.append((fn, file_id))

        # TODO: Rename back if it was previously at a different location

        # TODO: If given a directory, restore the entire contents from
        # the previous version.

        # TODO: Make a backup to a temporary file.

        # TODO: If the file previously didn't exist, delete it?
        for fn, file_id in nids:
            backup_file(fn)

            f = AtomicFile(fn, 'wb')
            try:
                f.write(old_tree.get_file(file_id).read())
                f.commit()
            finally:
                f.close()
    def pending_merges(self):
        """Return a list of pending merges.

        These are revisions that have been merged into the working
        directory but not yet committed.
        """
        cfn = self._rel_controlfilename('pending-merges')
        if not self._transport.has(cfn):
            return []
        p = []
        for l in self.controlfile('pending-merges', 'r').readlines():
            p.append(l.rstrip('\n'))
        return p

    def add_pending_merge(self, *revision_ids):
        # TODO: Perhaps should check at this point that the
        # history of the revision is actually present?
        p = self.pending_merges()
        updated = False
        for rev_id in revision_ids:
            if rev_id in p:
                continue
            p.append(rev_id)
            updated = True
        if updated:
            self.set_pending_merges(p)

    def set_pending_merges(self, rev_list):
        self.put_controlfile('pending-merges', '\n'.join(rev_list))
    def get_parent(self):
        """Return the parent location of the branch.

        This is the default location for pull/missing.  The usual
        pattern is that the user can override it by specifying a
        location.
        """
        parent = self._get_parent_location()
        if parent is None:
            return parent
        # This is an old-format absolute path to a local branch
        # turn it into a url
        if parent.startswith('/'):
            parent = urlutils.local_path_to_url(parent)
        try:
            return urlutils.join(self.base[:-1], parent)
        except urlutils.InvalidURLJoin:
            raise errors.InaccessibleParent(parent, self.user_url)
    def _get_parent_location(self):
        raise NotImplementedError(self._get_parent_location)

    def _set_config_location(self, name, url, config=None,
                             make_relative=False):
        if config is None:
            config = self.get_config_stack()
        if url is None:
            url = ''
        elif make_relative:
            url = urlutils.relative_url(self.base, url)
        config.set(name, url)

    def _get_config_location(self, name, config=None):
        if config is None:
            config = self.get_config_stack()
        location = config.get(name)
        if location == '':
            location = None
        return location

    def get_child_submit_format(self):
        """Return the preferred format of submissions to this branch."""
        return self.get_config_stack().get('child_submit_format')
    def get_submit_branch(self):
        """Return the submit location of the branch.

        This is the default location for bundle.  The usual
        pattern is that the user can override it by specifying a
        location.
        """
        return self.get_config_stack().get('submit_branch')

    def set_submit_branch(self, location):
        """Set the submit location of the branch.

        This is the default location for bundle.  The usual
        pattern is that the user can override it by specifying a
        location.
        """
        self.get_config_stack().set('submit_branch', location)
    def get_public_branch(self):
        """Return the public location of the branch.

        This is used by merge directives.
        """
        return self._get_config_location('public_branch')

    def set_public_branch(self, location):
        """Set the public location of the branch.

        This is used by merge directives.
        """
        self._set_config_location('public_branch', location)
def get_push_location(self):
1145
"""Return None or the location to push this branch to."""
1146
return self.get_config_stack().get('push_location')
1148
def set_push_location(self, location):
1149
"""Set a new push location for this branch."""
1150
raise NotImplementedError(self.set_push_location)
1152
def _run_post_change_branch_tip_hooks(self, old_revno, old_revid):
1153
"""Run the post_change_branch_tip hooks."""
1154
hooks = Branch.hooks['post_change_branch_tip']
1157
new_revno, new_revid = self.last_revision_info()
1158
params = ChangeBranchTipParams(
1159
self, old_revno, new_revno, old_revid, new_revid)
1163
def _run_pre_change_branch_tip_hooks(self, new_revno, new_revid):
1164
"""Run the pre_change_branch_tip hooks."""
1165
hooks = Branch.hooks['pre_change_branch_tip']
1168
old_revno, old_revid = self.last_revision_info()
1169
params = ChangeBranchTipParams(
1170
self, old_revno, new_revno, old_revid, new_revid)
1175
"""Synchronise this branch with the master branch if any.
1177
:return: None or the last_revision pivoted out during the update.
1181
1182
    def check_revno(self, revno):
        """Check whether a revno corresponds to any revision.

        Zero (the NULL revision) is considered valid.
        """
        if revno != 0:
            self.check_real_revno(revno)

    def check_real_revno(self, revno):
        """Check whether a revno corresponds to a real revision.

        Zero (the NULL revision) is considered invalid.
        """
        if revno < 1 or revno > self.revno():
            raise errors.InvalidRevisionNumber(revno)
def clone(self, to_controldir, revision_id=None, name=None,
1198
repository_policy=None, tag_selector=None):
1199
"""Clone this branch into to_controldir preserving all semantic values.
1201
Most API users will want 'create_clone_on_transport', which creates a
1202
new bzrdir and branch on the fly.
1204
revision_id: if not None, the revision history in the new branch will
1205
be truncated to end with revision_id.
1207
result = to_controldir.create_branch(name=name)
1208
with self.lock_read(), result.lock_write():
1209
if repository_policy is not None:
1210
repository_policy.configure_branch(result)
1211
self.copy_content_into(
1212
result, revision_id=revision_id, tag_selector=tag_selector)
1215
def sprout(self, to_controldir, revision_id=None, repository_policy=None,
1216
repository=None, lossy=False, tag_selector=None):
1217
"""Create a new line of development from the branch, into to_controldir.
1219
to_controldir controls the branch format.
1221
revision_id: if not None, the revision history in the new branch will
1222
be truncated to end with revision_id.
1224
if (repository_policy is not None
1225
and repository_policy.requires_stacking()):
1226
to_controldir._format.require_stacking(_skip_repo=True)
1227
result = to_controldir.create_branch(repository=repository)
1229
raise errors.LossyPushToSameVCS(self, result)
1230
with self.lock_read(), result.lock_write():
1231
if repository_policy is not None:
1232
repository_policy.configure_branch(result)
1233
self.copy_content_into(
1234
result, revision_id=revision_id, tag_selector=tag_selector)
1235
master_url = self.get_bound_location()
1236
if master_url is None:
1237
result.set_parent(self.user_url)
1239
result.set_parent(master_url)
1242
def _synchronize_history(self, destination, revision_id):
1243
"""Synchronize last revision and revision history between branches.
1245
This version is most efficient when the destination is also a
1246
BzrBranch6, but works for BzrBranch5, as long as the destination's
1247
repository contains all the lefthand ancestors of the intended
1248
last_revision. If not, set_last_revision_info will fail.
1250
:param destination: The branch to copy the history into
1251
:param revision_id: The revision-id to truncate history at. May
1252
be None to copy complete history.
1254
source_revno, source_revision_id = self.last_revision_info()
1255
if revision_id is None:
1256
revno, revision_id = source_revno, source_revision_id
1258
graph = self.repository.get_graph()
1260
revno = graph.find_distance_to_null(
1261
revision_id, [(source_revision_id, source_revno)])
1262
except errors.GhostRevisionsHaveNoRevno:
1263
# Default to 1, if we can't find anything else
1265
destination.set_last_revision_info(revno, revision_id)
1267
def copy_content_into(self, destination, revision_id=None, tag_selector=None):
1268
"""Copy the content of self into destination.
1270
revision_id: if not None, the revision history in the new branch will
1271
be truncated to end with revision_id.
1272
tag_selector: Optional callback that receives a tag name
1273
and should return a boolean to indicate whether a tag should be copied
1275
return InterBranch.get(self, destination).copy_content_into(
1276
revision_id=revision_id, tag_selector=tag_selector)
1278
def update_references(self, target):
1279
if not self._format.supports_reference_locations:
1281
return InterBranch.get(self, target).update_references()
1283
def check(self, refs):
1284
"""Check consistency of the branch.
1286
In particular this checks that revisions given in the revision-history
1287
do actually match up in the revision graph, and that they're all
1288
present in the repository.
1290
Callers will typically also want to check the repository.
1292
:param refs: Calculated refs for this branch as specified by
1293
branch._get_check_refs()
1294
:return: A BranchCheckResult.
1296
with self.lock_read():
1297
result = BranchCheckResult(self)
1298
last_revno, last_revision_id = self.last_revision_info()
1299
actual_revno = refs[('lefthand-distance', last_revision_id)]
1300
if actual_revno != last_revno:
1301
result.errors.append(errors.BzrCheckError(
1302
'revno does not match len(mainline) %s != %s' % (
1303
last_revno, actual_revno)))
1304
# TODO: We should probably also check that self.revision_history
1305
# matches the repository for older branch formats.
1306
# If looking for the code that cross-checks repository parents
1307
# against the Graph.iter_lefthand_ancestry output, that is now a
1308
# repository specific check.
1311
def _get_checkout_format(self, lightweight=False):
1312
"""Return the most suitable metadir for a checkout of this branch.
1313
Weaves are used if this branch's repository uses weaves.
1315
format = self.repository.controldir.checkout_metadir()
1316
format.set_branch_format(self._format)
1319
def create_clone_on_transport(self, to_transport, revision_id=None,
1320
stacked_on=None, create_prefix=False,
1321
use_existing_dir=False, no_tree=None,
1323
"""Create a clone of this branch and its bzrdir.
1325
:param to_transport: The transport to clone onto.
1326
:param revision_id: The revision id to use as tip in the new branch.
1327
If None the tip is obtained from this branch.
1328
:param stacked_on: An optional URL to stack the clone on.
1329
:param create_prefix: Create any missing directories leading up to
1331
:param use_existing_dir: Use an existing directory if one exists.
1333
# XXX: Fix the bzrdir API to allow getting the branch back from the
1334
# clone call. Or something. 20090224 RBC/spiv.
1335
# XXX: Should this perhaps clone colocated branches as well,
1336
# rather than just the default branch? 20100319 JRV
1337
if revision_id is None:
1338
revision_id = self.last_revision()
1339
dir_to = self.controldir.clone_on_transport(
1340
to_transport, revision_id=revision_id, stacked_on=stacked_on,
1341
create_prefix=create_prefix, use_existing_dir=use_existing_dir,
1342
no_tree=no_tree, tag_selector=tag_selector)
1343
return dir_to.open_branch()
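    # Illustrative sketch (not part of the original module): cloning a branch
    # onto a transport.  The path below is an assumption for the example only.
    #
    #   from breezy import transport
    #   t = transport.get_transport('/srv/backups/mybranch')
    #   new_branch = branch.create_clone_on_transport(t, create_prefix=True)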
1345
def create_checkout(self, to_location, revision_id=None,
1346
lightweight=False, accelerator_tree=None,
1347
hardlink=False, recurse_nested=True):
1348
"""Create a checkout of a branch.
1350
:param to_location: The url to produce the checkout at
1351
:param revision_id: The revision to check out
1352
:param lightweight: If True, produce a lightweight checkout, otherwise,
1353
produce a bound branch (heavyweight checkout)
1354
:param accelerator_tree: A tree which can be used for retrieving file
1355
contents more quickly than the revision tree, i.e. a workingtree.
1356
The revision tree will be used for cases where accelerator_tree's
1357
content is different.
1358
:param hardlink: If true, hard-link files from accelerator_tree,
1360
:param recurse_nested: Whether to recurse into nested trees
1361
:return: The tree of the created checkout
1363
t = transport.get_transport(to_location)
1365
format = self._get_checkout_format(lightweight=lightweight)
1367
checkout = format.initialize_on_transport(t)
1368
except errors.AlreadyControlDirError:
1369
# It's fine if the control directory already exists,
1370
# as long as there is no existing branch and working tree.
1371
checkout = controldir.ControlDir.open_from_transport(t)
1373
checkout.open_branch()
1374
except errors.NotBranchError:
1377
raise errors.AlreadyControlDirError(t.base)
1378
if (checkout.control_transport.base
1379
== self.controldir.control_transport.base):
1380
# When checking out to the same control directory,
1381
# always create a lightweight checkout
1385
from_branch = checkout.set_branch_reference(target_branch=self)
1387
policy = checkout.determine_repository_policy()
1388
policy.acquire_repository()
1389
checkout_branch = checkout.create_branch()
1390
checkout_branch.bind(self)
1391
# pull up to the specified revision_id to set the initial
1392
# branch tip correctly, and seed it with history.
1393
checkout_branch.pull(self, stop_revision=revision_id)
1395
tree = checkout.create_workingtree(revision_id,
1396
from_branch=from_branch,
1397
accelerator_tree=accelerator_tree,
1399
basis_tree = tree.basis_tree()
1400
with basis_tree.lock_read():
1401
for path in basis_tree.iter_references():
1402
reference_parent = tree.reference_parent(path)
1403
if reference_parent is None:
1404
warning('Branch location for %s unknown.', path)
1406
reference_parent.create_checkout(
1408
basis_tree.get_reference_revision(path), lightweight)
1411
def reconcile(self, thorough=True):
1412
"""Make sure the data stored in this branch is consistent.
1414
:return: A `ReconcileResult` object.
1416
raise NotImplementedError(self.reconcile)
1418
def supports_tags(self):
1419
return self._format.supports_tags()
1421
def automatic_tag_name(self, revision_id):
1422
"""Try to automatically find the tag name for a revision.
1424
:param revision_id: Revision id of the revision.
1425
:return: A tag name or None if no tag name could be determined.
1427
for hook in Branch.hooks['automatic_tag_name']:
1428
ret = hook(self, revision_id)
1433
def _check_if_descendant_or_diverged(self, revision_a, revision_b, graph,
1435
"""Ensure that revision_b is a descendant of revision_a.
1437
This is a helper function for update_revisions.
1439
:raises: DivergedBranches if revision_b has diverged from revision_a.
1440
:returns: True if revision_b is a descendant of revision_a.
1442
relation = self._revision_relations(revision_a, revision_b, graph)
1443
if relation == 'b_descends_from_a':
1196
raise InvalidRevisionNumber(revno)
1198
def sign_revision(self, revision_id, gpg_strategy):
1199
plaintext = Testament.from_revision(self, revision_id).as_short_text()
1200
self.store_revision_signature(gpg_strategy, plaintext, revision_id)
1203
def store_revision_signature(self, gpg_strategy, plaintext, revision_id):
1204
self.revision_store.add(StringIO(gpg_strategy.sign(plaintext)),
1208
class ScratchBranch(_Branch):
1209
"""Special test class: a branch that cleans up after itself.
1211
>>> b = ScratchBranch()
1215
>>> b._transport.__del__()
1220
def __init__(self, files=[], dirs=[], transport=None):
1221
"""Make a test branch.
1223
This creates a temporary directory and runs init-tree in it.
1225
If any files are listed, they are created in the working copy.
1227
if transport is None:
1228
transport = bzrlib.transport.local.ScratchTransport()
1229
super(ScratchBranch, self).__init__(transport, init=True)
1231
super(ScratchBranch, self).__init__(transport)
1234
self._transport.mkdir(d)
1237
self._transport.put(f, 'content of %s' % f)
1242
>>> orig = ScratchBranch(files=["file1", "file2"])
1243
>>> clone = orig.clone()
1244
>>> if os.name != 'nt':
1245
... os.path.samefile(orig.base, clone.base)
1247
... orig.base == clone.base
1250
>>> os.path.isfile(os.path.join(clone.base, "file1"))
1253
from shutil import copytree
1254
from tempfile import mkdtemp
1257
copytree(self.base, base, symlinks=True)
1258
return ScratchBranch(
1259
transport=bzrlib.transport.local.ScratchTransport(base))
1262
######################################################################
1266
def is_control_file(filename):
1267
## FIXME: better check
1268
filename = os.path.normpath(filename)
1269
while filename != '':
1270
head, tail = os.path.split(filename)
1271
## mutter('check %r for control file' % ((head, tail), ))
1272
if tail == bzrlib.BZRDIR:
1445
elif relation == 'diverged':
1446
raise errors.DivergedBranches(self, other_branch)
1447
elif relation == 'a_descends_from_b':
1450
raise AssertionError("invalid relation: %r" % (relation,))
1452
    def _revision_relations(self, revision_a, revision_b, graph):
        """Determine the relationship between two revisions.

        :returns: One of: 'a_descends_from_b', 'b_descends_from_a', 'diverged'
        """
        heads = graph.heads([revision_a, revision_b])
        if heads == {revision_b}:
            return 'b_descends_from_a'
        elif heads == {revision_a, revision_b}:
            # These branches have diverged
            return 'diverged'
        elif heads == {revision_a}:
            return 'a_descends_from_b'
        else:
            raise AssertionError("invalid heads: %r" % (heads,))
    def heads_to_fetch(self):
        """Return the heads that must and that should be fetched to copy this
        branch into another repo.

        :returns: a 2-tuple of (must_fetch, if_present_fetch).  must_fetch is a
            set of heads that must be fetched.  if_present_fetch is a set of
            heads that must be fetched if present, but no error is necessary if
            they are not present.
        """
        # For bzr native formats must_fetch is just the tip, and
        # if_present_fetch are the tags.
        must_fetch = {self.last_revision()}
        if_present_fetch = set()
        if self.get_config_stack().get('branch.fetch_tags'):
            try:
                if_present_fetch = set(self.tags.get_reverse_tag_dict())
            except errors.TagsNotSupported:
                pass
        must_fetch.discard(_mod_revision.NULL_REVISION)
        if_present_fetch.discard(_mod_revision.NULL_REVISION)
        return must_fetch, if_present_fetch
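    # Illustrative sketch (not part of the original module): how a caller
    # might consume the 2-tuple.  `target_repo` and `fetch_revisions` are
    # assumed example names, not real API.
    #
    #   must_fetch, if_present_fetch = branch.heads_to_fetch()
    #   fetch_revisions(target_repo, required=must_fetch,
    #                   optional=if_present_fetch)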
1490
def create_memorytree(self):
1491
"""Create a memory tree for this branch.
1493
:return: An in-memory MutableTree instance
1495
return memorytree.MemoryTree.create_on_branch(self)
1498
class BranchFormat(controldir.ControlComponentFormat):
1499
"""An encapsulation of the initialization and open routines for a format.
1501
Formats provide three things:
1502
* An initialization routine,
1503
* a format description
1506
Formats are placed in an dict by their format string for reference
1507
during branch opening. It's not required that these be instances, they
1508
can be classes themselves with class methods - it simply depends on
1509
whether state is needed for a given format or not.
1511
Once a format is deprecated, just deprecate the initialize and open
1512
methods on the format class. Do not deprecate the object, as the
1513
object will be created every time regardless.
1516
def __eq__(self, other):
1517
return self.__class__ is other.__class__
1519
def __ne__(self, other):
1520
return not (self == other)
1522
def get_reference(self, controldir, name=None):
1523
"""Get the target reference of the branch in controldir.
1525
format probing must have been completed before calling
1526
this method - it is assumed that the format of the branch
1527
in controldir is correct.
1529
:param controldir: The controldir to get the branch data from.
1530
:param name: Name of the colocated branch to fetch
1531
:return: None if the branch is not a reference branch.
1536
def set_reference(self, controldir, name, to_branch):
1537
"""Set the target reference of the branch in controldir.
1539
format probing must have been completed before calling
1540
this method - it is assumed that the format of the branch
1541
in controldir is correct.
1543
:param controldir: The controldir to set the branch reference for.
1544
:param name: Name of colocated branch to set, None for default
1545
:param to_branch: branch that the checkout is to reference
1547
raise NotImplementedError(self.set_reference)
1549
def get_format_description(self):
1550
"""Return the short format description for this format."""
1551
raise NotImplementedError(self.get_format_description)
1553
def _run_post_branch_init_hooks(self, controldir, name, branch):
1554
hooks = Branch.hooks['post_branch_init']
1557
params = BranchInitHookParams(self, controldir, name, branch)
1561
def initialize(self, controldir, name=None, repository=None,
1562
append_revisions_only=None):
1563
"""Create a branch of this format in controldir.
1565
:param name: Name of the colocated branch to create.
1567
raise NotImplementedError(self.initialize)
1569
def is_supported(self):
1570
"""Is this format supported?
1572
Supported formats can be initialized and opened.
1573
Unsupported formats may not support initialization or committing or
1574
some other features depending on the reason for not being supported.
1578
def make_tags(self, branch):
1579
"""Create a tags object for branch.
1581
This method is on BranchFormat, because BranchFormats are reflected
1582
over the wire via network_name(), whereas full Branch instances require
1583
multiple VFS method calls to operate at all.
1585
The default implementation returns a disabled-tags instance.
1587
Note that it is normal for branch to be a RemoteBranch when using tags
1590
return _mod_tag.DisabledTags(branch)
1592
def network_name(self):
1593
"""A simple byte string uniquely identifying this format for RPC calls.
1595
MetaDir branch formats use their disk format string to identify the
1596
repository over the wire. All in one formats such as bzr < 0.8, and
1597
foreign formats like svn/git and hg should use some marker which is
1598
unique and immutable.
1600
raise NotImplementedError(self.network_name)
1602
def open(self, controldir, name=None, _found=False, ignore_fallbacks=False,
1603
found_repository=None, possible_transports=None):
1604
"""Return the branch object for controldir.
1606
:param controldir: A ControlDir that contains a branch.
1607
:param name: Name of colocated branch to open
1608
:param _found: a private parameter, do not use it. It is used to
1609
indicate if format probing has already be done.
1610
:param ignore_fallbacks: when set, no fallback branches will be opened
1611
(if there are any). Default is to open fallbacks.
1613
raise NotImplementedError(self.open)
1615
def supports_set_append_revisions_only(self):
1616
"""True if this format supports set_append_revisions_only."""
1619
def supports_stacking(self):
1620
"""True if this format records a stacked-on branch."""
1623
def supports_leaving_lock(self):
1624
"""True if this format supports leaving locks in place."""
1625
return False # by default
1628
return self.get_format_description().rstrip()
1630
def supports_tags(self):
1631
"""True if this format supports tags stored in the branch"""
1632
return False # by default
1634
def tags_are_versioned(self):
1635
"""Whether the tag container for this branch versions tags."""
1638
def supports_tags_referencing_ghosts(self):
1639
"""True if tags can reference ghost revisions."""
1642
def supports_store_uncommitted(self):
1643
"""True if uncommitted changes can be stored in this branch."""
1646
def stores_revno(self):
1647
"""True if this branch format store revision numbers."""
1651
class BranchHooks(Hooks):
1652
"""A dictionary mapping hook name to a list of callables for branch hooks.
1654
e.g. ['post_push'] Is the list of items to be called when the
1655
push function is invoked.
1659
"""Create the default hooks.
1661
These are all empty initially, because by default nothing should get
1664
Hooks.__init__(self, "breezy.branch", "Branch.hooks")
1667
"Called with the Branch object that has been opened after a "
1668
"branch is opened.", (1, 8))
1671
"Called after a push operation completes. post_push is called "
1672
"with a breezy.branch.BranchPushResult object and only runs in "
1673
"the bzr client.", (0, 15))
1676
"Called after a pull operation completes. post_pull is called "
1677
"with a breezy.branch.PullResult object and only runs in the "
1678
"bzr client.", (0, 15))
1681
"Called after a commit is calculated but before it is "
1682
"completed. pre_commit is called with (local, master, old_revno, "
1683
"old_revid, future_revno, future_revid, tree_delta, future_tree"
1684
"). old_revid is NULL_REVISION for the first commit to a branch, "
1685
"tree_delta is a TreeDelta object describing changes from the "
1686
"basis revision. hooks MUST NOT modify this delta. "
1687
" future_tree is an in-memory tree obtained from "
1688
"CommitBuilder.revision_tree() and hooks MUST NOT modify this "
1692
"Called in the bzr client after a commit has completed. "
1693
"post_commit is called with (local, master, old_revno, old_revid, "
1694
"new_revno, new_revid). old_revid is NULL_REVISION for the first "
1695
"commit to a branch.", (0, 15))
1698
"Called in the bzr client after an uncommit completes. "
1699
"post_uncommit is called with (local, master, old_revno, "
1700
"old_revid, new_revno, new_revid) where local is the local branch "
1701
"or None, master is the target branch, and an empty branch "
1702
"receives new_revno of 0, new_revid of None.", (0, 15))
1704
'pre_change_branch_tip',
1705
"Called in bzr client and server before a change to the tip of a "
1706
"branch is made. pre_change_branch_tip is called with a "
1707
"breezy.branch.ChangeBranchTipParams. Note that push, pull, "
1708
"commit, uncommit will all trigger this hook.", (1, 6))
1710
'post_change_branch_tip',
1711
"Called in bzr client and server after a change to the tip of a "
1712
"branch is made. post_change_branch_tip is called with a "
1713
"breezy.branch.ChangeBranchTipParams. Note that push, pull, "
1714
"commit, uncommit will all trigger this hook.", (1, 4))
1716
'transform_fallback_location',
1717
"Called when a stacked branch is activating its fallback "
1718
"locations. transform_fallback_location is called with (branch, "
1719
"url), and should return a new url. Returning the same url "
1720
"allows it to be used as-is, returning a different one can be "
1721
"used to cause the branch to stack on a closer copy of that "
1722
"fallback_location. Note that the branch cannot have history "
1723
"accessing methods called on it during this hook because the "
1724
"fallback locations have not been activated. When there are "
1725
"multiple hooks installed for transform_fallback_location, "
1726
"all are called with the url returned from the previous hook."
1727
"The order is however undefined.", (1, 9))
1729
'automatic_tag_name',
1730
"Called to determine an automatic tag name for a revision. "
1731
"automatic_tag_name is called with (branch, revision_id) and "
1732
"should return a tag name or None if no tag name could be "
1733
"determined. The first non-None tag name returned will be used.",
1737
"Called after new branch initialization completes. "
1738
"post_branch_init is called with a "
1739
"breezy.branch.BranchInitHookParams. "
1740
"Note that init, branch and checkout (both heavyweight and "
1741
"lightweight) will all trigger this hook.", (2, 2))
1744
"Called after a checkout switches branch. "
1745
"post_switch is called with a "
1746
"breezy.branch.SwitchHookParams.", (2, 2))
1749
# install the default hooks into the Branch class.
1750
Branch.hooks = BranchHooks()
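# Illustrative sketch (not part of the original module): installing one of the
# hooks documented above.  The hook body is an assumption for the example.
#
#   def _log_tip_change(params):
#       # params is a ChangeBranchTipParams instance
#       note("tip moved from %s to %s", params.old_revid, params.new_revid)
#
#   Branch.hooks.install_named_hook(
#       'post_change_branch_tip', _log_tip_change, 'tip change logger')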
1753
class ChangeBranchTipParams(object):
1754
"""Object holding parameters passed to `*_change_branch_tip` hooks.
1756
There are 5 fields that hooks may wish to access:
1758
:ivar branch: the branch being changed
1759
:ivar old_revno: revision number before the change
1760
:ivar new_revno: revision number after the change
1761
:ivar old_revid: revision id before the change
1762
:ivar new_revid: revision id after the change
1764
The revid fields are strings. The revno fields are integers.
1767
def __init__(self, branch, old_revno, new_revno, old_revid, new_revid):
1768
"""Create a group of ChangeBranchTip parameters.
1770
:param branch: The branch being changed.
1771
:param old_revno: Revision number before the change.
1772
:param new_revno: Revision number after the change.
1773
:param old_revid: Tip revision id before the change.
1774
:param new_revid: Tip revision id after the change.
1776
self.branch = branch
1777
self.old_revno = old_revno
1778
self.new_revno = new_revno
1779
self.old_revid = old_revid
1780
self.new_revid = new_revid
1782
def __eq__(self, other):
1783
return self.__dict__ == other.__dict__
1786
return "<%s of %s from (%s, %s) to (%s, %s)>" % (
1787
self.__class__.__name__, self.branch,
1788
self.old_revno, self.old_revid, self.new_revno, self.new_revid)
1791
class BranchInitHookParams(object):
1792
"""Object holding parameters passed to `*_branch_init` hooks.
1794
There are 4 fields that hooks may wish to access:
1796
:ivar format: the branch format
1797
:ivar bzrdir: the ControlDir where the branch will be/has been initialized
1798
:ivar name: name of colocated branch, if any (or None)
1799
:ivar branch: the branch created
1801
Note that for lightweight checkouts, the bzrdir and format fields refer to
1802
the checkout, hence they are different from the corresponding fields in
1803
branch, which refer to the original branch.
1806
def __init__(self, format, controldir, name, branch):
1807
"""Create a group of BranchInitHook parameters.
1809
:param format: the branch format
1810
:param controldir: the ControlDir where the branch will be/has been
1812
:param name: name of colocated branch, if any (or None)
1813
:param branch: the branch created
1815
Note that for lightweight checkouts, the bzrdir and format fields refer
1816
to the checkout, hence they are different from the corresponding fields
1817
in branch, which refer to the original branch.
1819
self.format = format
1820
self.controldir = controldir
1822
self.branch = branch
1824
def __eq__(self, other):
1825
return self.__dict__ == other.__dict__
1828
return "<%s of %s>" % (self.__class__.__name__, self.branch)
1831
class SwitchHookParams(object):
1832
"""Object holding parameters passed to `*_switch` hooks.
1834
There are 4 fields that hooks may wish to access:
1836
:ivar control_dir: ControlDir of the checkout to change
1837
:ivar to_branch: branch that the checkout is to reference
1838
:ivar force: skip the check for local commits in a heavy checkout
1839
:ivar revision_id: revision ID to switch to (or None)
1842
def __init__(self, control_dir, to_branch, force, revision_id):
1843
"""Create a group of SwitchHook parameters.
1845
:param control_dir: ControlDir of the checkout to change
1846
:param to_branch: branch that the checkout is to reference
1847
:param force: skip the check for local commits in a heavy checkout
1848
:param revision_id: revision ID to switch to (or None)
1850
self.control_dir = control_dir
1851
self.to_branch = to_branch
1853
self.revision_id = revision_id
1855
def __eq__(self, other):
1856
return self.__dict__ == other.__dict__
1859
return "<%s for %s to (%s, %s)>" % (
1860
self.__class__.__name__, self.control_dir, self.to_branch,
1864
class BranchFormatRegistry(controldir.ControlComponentFormatRegistry):
1865
"""Branch format registry."""
1867
def __init__(self, other_registry=None):
1868
super(BranchFormatRegistry, self).__init__(other_registry)
1869
self._default_format = None
1870
self._default_format_key = None
1872
def get_default(self):
1873
"""Return the current default format."""
1874
if (self._default_format_key is not None
1875
and self._default_format is None):
1876
self._default_format = self.get(self._default_format_key)
1877
return self._default_format
1879
def set_default(self, format):
1880
"""Set the default format."""
1881
self._default_format = format
1882
self._default_format_key = None
1884
def set_default_key(self, format_string):
1885
"""Set the default format by its format string."""
1886
self._default_format_key = format_string
1887
self._default_format = None
1890
network_format_registry = registry.FormatRegistry()
1891
"""Registry of formats indexed by their network name.
1893
The network name for a branch format is an identifier that can be used when
1894
referring to formats with smart server operations. See
1895
BranchFormat.network_name() for more detail.
1898
format_registry = BranchFormatRegistry(network_format_registry)
1901
# formats which have no format string are not discoverable
1902
# and not independently creatable, so are not registered.
1903
format_registry.register_lazy(
1904
b"Bazaar-NG branch format 5\n", "breezy.bzr.fullhistory",
1906
format_registry.register_lazy(
1907
b"Bazaar Branch Format 6 (bzr 0.15)\n",
1908
"breezy.bzr.branch", "BzrBranchFormat6")
1909
format_registry.register_lazy(
1910
b"Bazaar Branch Format 7 (needs bzr 1.6)\n",
1911
"breezy.bzr.branch", "BzrBranchFormat7")
1912
format_registry.register_lazy(
1913
b"Bazaar Branch Format 8 (needs bzr 1.15)\n",
1914
"breezy.bzr.branch", "BzrBranchFormat8")
1915
format_registry.register_lazy(
1916
b"Bazaar-NG Branch Reference Format 1\n",
1917
"breezy.bzr.branch", "BranchReferenceFormat")
1919
format_registry.set_default_key(b"Bazaar Branch Format 7 (needs bzr 1.6)\n")
1922
class BranchWriteLockResult(LogicalLockResult):
1923
"""The result of write locking a branch.
1925
:ivar token: The token obtained from the underlying branch lock, or
1927
:ivar unlock: A callable which will unlock the lock.
1931
return "BranchWriteLockResult(%r, %r)" % (self.unlock, self.token)
1934
######################################################################
1935
# results of operations
1938
class _Result(object):

    def _show_tag_conficts(self, to_file):
        if not getattr(self, 'tag_conflicts', None):
            return
        to_file.write('Conflicting tags:\n')
        for name, value1, value2 in self.tag_conflicts:
            to_file.write('    %s\n' % (name, ))
class PullResult(_Result):
1949
"""Result of a Branch.pull operation.
1951
:ivar old_revno: Revision number before pull.
1952
:ivar new_revno: Revision number after pull.
1953
:ivar old_revid: Tip revision id before pull.
1954
:ivar new_revid: Tip revision id after pull.
1955
:ivar source_branch: Source (local) branch object. (read locked)
1956
:ivar master_branch: Master branch of the target, or the target if no
1958
:ivar local_branch: target branch if there is a Master, else None
1959
:ivar target_branch: Target/destination branch object. (write locked)
1960
:ivar tag_conflicts: A list of tag conflicts, see BasicTags.merge_to
1961
:ivar tag_updates: A dict with new tags, see BasicTags.merge_to
1964
    def report(self, to_file):
        tag_conflicts = getattr(self, "tag_conflicts", None)
        tag_updates = getattr(self, "tag_updates", None)
        if not is_quiet():
            if self.old_revid != self.new_revid:
                to_file.write('Now on revision %d.\n' % self.new_revno)
            if tag_updates:
                to_file.write('%d tag(s) updated.\n' % len(tag_updates))
            if self.old_revid == self.new_revid and not tag_updates:
                if not tag_conflicts:
                    to_file.write('No revisions or tags to pull.\n')
                else:
                    to_file.write('No revisions to pull.\n')
        self._show_tag_conficts(to_file)
class BranchPushResult(_Result):
1981
"""Result of a Branch.push operation.
1983
:ivar old_revno: Revision number (eg 10) of the target before push.
1984
:ivar new_revno: Revision number (eg 12) of the target after push.
1985
:ivar old_revid: Tip revision id (eg joe@foo.com-1234234-aoeua34) of target
1987
:ivar new_revid: Tip revision id (eg joe@foo.com-5676566-boa234a) of target
1989
:ivar source_branch: Source branch object that the push was from. This is
1990
read locked, and generally is a local (and thus low latency) branch.
1991
:ivar master_branch: If target is a bound branch, the master branch of
1992
target, or target itself. Always write locked.
1993
:ivar target_branch: The direct Branch where data is being sent (write
1995
:ivar local_branch: If the target is a bound branch this will be the
1996
target, otherwise it will be None.
1999
def report(self, to_file):
2000
# TODO: This function gets passed a to_file, but then
2001
# ignores it and calls note() instead. This is also
2002
# inconsistent with PullResult(), which writes to stdout.
2003
# -- JRV20110901, bug #838853
2004
tag_conflicts = getattr(self, "tag_conflicts", None)
2005
tag_updates = getattr(self, "tag_updates", None)
2007
if self.old_revid != self.new_revid:
2008
if self.new_revno is not None:
2009
note(gettext('Pushed up to revision %d.'),
2012
note(gettext('Pushed up to revision id %s.'),
2013
self.new_revid.decode('utf-8'))
2015
note(ngettext('%d tag updated.', '%d tags updated.',
2016
len(tag_updates)) % len(tag_updates))
2017
if self.old_revid == self.new_revid and not tag_updates:
2018
if not tag_conflicts:
2019
note(gettext('No new revisions or tags to push.'))
2021
note(gettext('No new revisions to push.'))
2022
self._show_tag_conficts(to_file)
2025
class BranchCheckResult(object):
2026
"""Results of checking branch consistency.
2031
def __init__(self, branch):
2032
self.branch = branch
2035
def report_results(self, verbose):
2036
"""Report the check results via trace.note.
2038
:param verbose: Requests more detailed display of what was checked,
2041
note(gettext('checked branch {0} format {1}').format(
2042
self.branch.user_url, self.branch._format))
2043
for error in self.errors:
2044
note(gettext('found error:%s'), error)
2047
class InterBranch(InterObject):
2048
"""This class represents operations taking place between two branches.
2050
Its instances have methods like pull() and push() and contain
2051
references to the source and target repositories these operations
2052
can be carried out on.
2056
"""The available optimised InterBranch types."""
2059
def _get_branch_formats_to_test(klass):
2060
"""Return an iterable of format tuples for testing.
2062
:return: An iterable of (from_format, to_format) to use when testing
2063
this InterBranch class. Each InterBranch class should define this
2066
raise NotImplementedError(klass._get_branch_formats_to_test)
2068
def pull(self, overwrite=False, stop_revision=None,
2069
possible_transports=None, local=False, tag_selector=None):
2070
"""Mirror source into target branch.
2072
The target branch is considered to be 'local', having low latency.
2074
:returns: PullResult instance
2076
raise NotImplementedError(self.pull)
2078
def push(self, overwrite=False, stop_revision=None, lossy=False,
2079
_override_hook_source_branch=None, tag_selector=None):
2080
"""Mirror the source branch into the target branch.
2082
The source branch is considered to be 'local', having low latency.
2084
raise NotImplementedError(self.push)
2086
def copy_content_into(self, revision_id=None, tag_selector=None):
2087
"""Copy the content of source into target
2090
if not None, the revision history in the new branch will
2091
be truncated to end with revision_id.
2092
:param tag_selector: Optional callback that can decide
2093
to copy or not copy tags.
2095
raise NotImplementedError(self.copy_content_into)
2097
def fetch(self, stop_revision=None, limit=None, lossy=False):
2100
:param stop_revision: Last revision to fetch
2101
:param limit: Optional rough limit of revisions to fetch
2102
:return: FetchResult object
2104
raise NotImplementedError(self.fetch)
2106
def update_references(self):
2107
"""Import reference information from source to target.
2109
raise NotImplementedError(self.update_references)
2112
def _fix_overwrite_type(overwrite):
    if isinstance(overwrite, bool):
        if overwrite:
            return ["history", "tags"]
        else:
            return []
    return overwrite
2121
class GenericInterBranch(InterBranch):
2122
"""InterBranch implementation that uses public Branch functions."""
2125
def is_compatible(klass, source, target):
2126
# GenericBranch uses the public API, so always compatible
2130
def _get_branch_formats_to_test(klass):
2131
return [(format_registry.get_default(), format_registry.get_default())]
2134
def unwrap_format(klass, format):
2135
if isinstance(format, remote.RemoteBranchFormat):
2136
format._ensure_real()
2137
return format._custom_format
2140
def copy_content_into(self, revision_id=None, tag_selector=None):
2141
"""Copy the content of source into target
2143
revision_id: if not None, the revision history in the new branch will
2144
be truncated to end with revision_id.
2146
with self.source.lock_read(), self.target.lock_write():
2147
self.source._synchronize_history(self.target, revision_id)
2148
self.update_references()
2150
parent = self.source.get_parent()
2151
except errors.InaccessibleParent as e:
2152
mutter('parent was not accessible to copy: %s', str(e))
2155
self.target.set_parent(parent)
2156
if self.source._push_should_merge_tags():
2157
self.source.tags.merge_to(self.target.tags, selector=tag_selector)
2159
def fetch(self, stop_revision=None, limit=None, lossy=False):
2160
if self.target.base == self.source.base:
2162
with self.source.lock_read(), self.target.lock_write():
2163
fetch_spec_factory = fetch.FetchSpecFactory()
2164
fetch_spec_factory.source_branch = self.source
2165
fetch_spec_factory.source_branch_stop_revision_id = stop_revision
2166
fetch_spec_factory.source_repo = self.source.repository
2167
fetch_spec_factory.target_repo = self.target.repository
2168
fetch_spec_factory.target_repo_kind = (
2169
fetch.TargetRepoKinds.PREEXISTING)
2170
fetch_spec_factory.limit = limit
2171
fetch_spec = fetch_spec_factory.make_fetch_spec()
2172
return self.target.repository.fetch(
2173
self.source.repository,
2175
fetch_spec=fetch_spec)
2177
def _update_revisions(self, stop_revision=None, overwrite=False,
2179
with self.source.lock_read(), self.target.lock_write():
2180
other_revno, other_last_revision = self.source.last_revision_info()
2181
stop_revno = None # unknown
2182
if stop_revision is None:
2183
stop_revision = other_last_revision
2184
if _mod_revision.is_null(stop_revision):
2185
# if there are no commits, we're done.
2187
stop_revno = other_revno
2189
# what's the current last revision, before we fetch [and change it
2191
last_rev = _mod_revision.ensure_null(self.target.last_revision())
2192
# we fetch here so that we don't process data twice in the common
2193
# case of having something to pull, and so that the check for
2194
# already merged can operate on the just fetched graph, which will
2195
# be cached in memory.
2196
self.fetch(stop_revision=stop_revision)
2197
# Check to see if one is an ancestor of the other
2200
graph = self.target.repository.get_graph()
2201
if self.target._check_if_descendant_or_diverged(
2202
stop_revision, last_rev, graph, self.source):
2203
# stop_revision is a descendant of last_rev, but we aren't
2204
# overwriting, so we're done.
2206
if stop_revno is None:
2208
graph = self.target.repository.get_graph()
2209
this_revno, this_last_revision = \
2210
self.target.last_revision_info()
2211
stop_revno = graph.find_distance_to_null(
2212
stop_revision, [(other_last_revision, other_revno),
2213
(this_last_revision, this_revno)])
2214
self.target.set_last_revision_info(stop_revno, stop_revision)
2216
def pull(self, overwrite=False, stop_revision=None,
2217
possible_transports=None, run_hooks=True,
2218
_override_hook_target=None, local=False,
2220
"""Pull from source into self, updating my master if any.
2222
:param run_hooks: Private parameter - if false, this branch
2223
is being called because it's the master of the primary branch,
2224
so it should not run its hooks.
2226
with cleanup.ExitStack() as exit_stack:
2227
exit_stack.enter_context(self.target.lock_write())
2228
bound_location = self.target.get_bound_location()
2229
if local and not bound_location:
2230
raise errors.LocalRequiresBoundBranch()
2231
master_branch = None
2232
source_is_master = False
2234
# bound_location comes from a config file, some care has to be
2235
# taken to relate it to source.user_url
2236
normalized = urlutils.normalize_url(bound_location)
2238
relpath = self.source.user_transport.relpath(normalized)
2239
source_is_master = (relpath == '')
2240
except (errors.PathNotChild, urlutils.InvalidURL):
2241
source_is_master = False
2242
if not local and bound_location and not source_is_master:
2243
# not pulling from master, so we need to update master.
2244
master_branch = self.target.get_master_branch(
2245
possible_transports)
2246
exit_stack.enter_context(master_branch.lock_write())
2248
# pull from source into master.
2250
self.source, overwrite, stop_revision, run_hooks=False,
2251
tag_selector=tag_selector)
2253
overwrite, stop_revision, _hook_master=master_branch,
2254
run_hooks=run_hooks,
2255
_override_hook_target=_override_hook_target,
2256
merge_tags_to_master=not source_is_master,
2257
tag_selector=tag_selector)

    def push(self, overwrite=False, stop_revision=None, lossy=False,
             _override_hook_source_branch=None, tag_selector=None):
        """See InterBranch.push.

        This is the basic concrete implementation of push().

        :param _override_hook_source_branch: If specified, run the hooks
            passing this Branch as the source, rather than self.  This is for
            use of RemoteBranch, where push is delegated to the underlying
            vfs-based Branch.
        """
        if lossy:
            raise errors.LossyPushToSameVCS(self.source, self.target)
        # TODO: Public option to disable running hooks - should be trivial but
        # needs tests.

        def _run_hooks():
            if _override_hook_source_branch:
                result.source_branch = _override_hook_source_branch
            for hook in Branch.hooks['post_push']:
                hook(result)

        with self.source.lock_read(), self.target.lock_write():
            bound_location = self.target.get_bound_location()
            if bound_location and self.target.base != bound_location:
                # there is a master branch.
                #
                # XXX: Why the second check? Is it even supported for a branch
                # to be bound to itself? -- mbp 20070507
                master_branch = self.target.get_master_branch()
                with master_branch.lock_write():
                    # push into the master from the source branch.
                    master_inter = InterBranch.get(self.source, master_branch)
                    master_inter._basic_push(
                        overwrite, stop_revision, tag_selector=tag_selector)
                    # and push into the target branch from the source. Note
                    # that we push from the source branch again, because it's
                    # considered the highest bandwidth repository.
                    result = self._basic_push(
                        overwrite, stop_revision, tag_selector=tag_selector)
                    result.master_branch = master_branch
                    result.local_branch = self.target
                    _run_hooks()
                    return result
            # no master branch
            master_branch = None
            result = self._basic_push(
                overwrite, stop_revision, tag_selector=tag_selector)
            # TODO: Why set master_branch and local_branch if there's no
            # binding? Maybe cleaner to just leave them unset? -- mbp
            result.master_branch = self.target
            result.local_branch = None
            _run_hooks()
            return result
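
    # Usage sketch (editorial addition; the paths are placeholders): the
    # public entry point is Branch.push(), which selects this optimiser:
    #
    #   from breezy.branch import Branch
    #   local = Branch.open('path/to/local')
    #   remote = Branch.open('path/to/mirror')
    #   result = local.push(remote, overwrite=False)
    #   print(result.old_revid, result.new_revid)
    #
    # The result carries the pre- and post-push tips of the target branch, as
    # filled in by _basic_push() below.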

    def _basic_push(self, overwrite, stop_revision, tag_selector=None):
        """Basic implementation of push without bound branches or hooks.

        Must be called with source read locked and target write locked.
        """
        result = BranchPushResult()
        result.source_branch = self.source
        result.target_branch = self.target
        result.old_revno, result.old_revid = self.target.last_revision_info()
        overwrite = _fix_overwrite_type(overwrite)
        if result.old_revid != stop_revision:
            # We assume that during 'push' this repository is closer than
            # the target.
            graph = self.source.repository.get_graph(self.target.repository)
            self._update_revisions(
                stop_revision, overwrite=("history" in overwrite), graph=graph)
        if self.source._push_should_merge_tags():
            result.tag_updates, result.tag_conflicts = (
                self.source.tags.merge_to(
                    self.target.tags, "tags" in overwrite, selector=tag_selector))
        self.update_references()
        result.new_revno, result.new_revid = self.target.last_revision_info()
        return result
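
    # Editorial note (hedged): overwrite is normalised by _fix_overwrite_type,
    # defined elsewhere in this module, so that the membership tests
    # "history" in overwrite and "tags" in overwrite above work for both
    # boolean and list arguments. The expected behaviour is roughly:
    #
    #   _fix_overwrite_type(True)     -> a collection with "history" and "tags"
    #   _fix_overwrite_type(False)    -> an empty collection
    #   _fix_overwrite_type(["tags"]) -> ["tags"] (passed through unchanged)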

    def _pull(self, overwrite=False, stop_revision=None,
              possible_transports=None, _hook_master=None, run_hooks=True,
              _override_hook_target=None, local=False,
              merge_tags_to_master=True, tag_selector=None):
        """See Branch.pull.

        This function is the core worker, used by GenericInterBranch.pull to
        avoid duplication when pulling source->master and source->local.

        :param _hook_master: Private parameter - set the branch to
            be supplied as the master to pull hooks.
        :param run_hooks: Private parameter - if false, this branch
            is being called because it's the master of the primary branch,
            so it should not run its hooks.
        :param _override_hook_target: Private parameter - set the branch to be
            supplied as the target_branch to pull hooks.
        :param local: Only update the local branch, and not the bound branch.
        """
        # This type of branch can't be bound.
        if local:
            raise errors.LocalRequiresBoundBranch()
        result = PullResult()
        result.source_branch = self.source
        if _override_hook_target is None:
            result.target_branch = self.target
        else:
            result.target_branch = _override_hook_target
        with self.source.lock_read():
            # We assume that during 'pull' the target repository is closer than
            # the source one.
            graph = self.target.repository.get_graph(self.source.repository)
            # TODO: Branch formats should have a flag that indicates
            # that revno's are expensive, and pull() should honor that flag.
            result.old_revno, result.old_revid = \
                self.target.last_revision_info()
            overwrite = _fix_overwrite_type(overwrite)
            self._update_revisions(
                stop_revision, overwrite=("history" in overwrite), graph=graph)
            # TODO: The old revid should be specified when merging tags,
            # so a tags implementation that versions tags can only
            # pull in the most recent changes. -- JRV20090506
            result.tag_updates, result.tag_conflicts = (
                self.source.tags.merge_to(
                    self.target.tags, "tags" in overwrite,
                    ignore_master=not merge_tags_to_master,
                    selector=tag_selector))
            self.update_references()
            result.new_revno, result.new_revid = (
                self.target.last_revision_info())
            if _hook_master:
                result.master_branch = _hook_master
                result.local_branch = result.target_branch
            else:
                result.master_branch = result.target_branch
                result.local_branch = None
            if run_hooks:
                for hook in Branch.hooks['post_pull']:
                    hook(result)
        return result
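
    # Hook sketch (editorial; the hook point 'post_pull' is real, the handler
    # below is hypothetical): each callable registered on the hook receives
    # the PullResult built above.
    #
    #   def report_pull(result):
    #       print("pulled %s -> %s" % (result.old_revid, result.new_revid))
    #
    #   Branch.hooks.install_named_hook('post_pull', report_pull, 'report')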

    def update_references(self):
        if not getattr(self.source._format, 'supports_reference_locations', False):
            return
        reference_dict = self.source._get_all_reference_info()
        if len(reference_dict) == 0:
            return
        old_base = self.source.base
        new_base = self.target.base
        target_reference_dict = self.target._get_all_reference_info()
        for tree_path, (branch_location, file_id) in viewitems(reference_dict):
            try:
                branch_location = urlutils.rebase_url(branch_location,
                                                      old_base, new_base)
            except urlutils.InvalidRebaseURLs:
                # Fall back to absolute URL
                branch_location = urlutils.join(old_base, branch_location)
            target_reference_dict.setdefault(
                tree_path, (branch_location, file_id))
        self.target._set_all_reference_info(target_reference_dict)
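
    # Editorial illustration (values are invented): a branch reference that
    # was recorded relative to the source base is re-expressed relative to
    # the target base, roughly
    #
    #   urlutils.rebase_url('../libs/foo',
    #                       'http://example.com/src/trunk',
    #                       'http://example.com/dst/trunk')
    #
    # and when the URLs cannot be rebased, the absolute form
    # urlutils.join(old_base, branch_location) is used instead, as above.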
InterBranch.register_optimiser(GenericInterBranch)
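
# Registration sketch (editorial; InterOtherBranch and OtherBranch are made
# up): additional branch implementations can plug in their own InterBranch
# optimiser in the same way, by providing is_compatible() and registering it:
#
#   class InterOtherBranch(InterBranch):
#
#       @classmethod
#       def is_compatible(cls, source, target):
#           return isinstance(target, OtherBranch)
#
#   InterBranch.register_optimiser(InterOtherBranch)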


def gen_file_id(name):
    """Return new file id.

    This should probably generate proper UUIDs, but for the moment we
    cope with just randomness because running uuidgen every time is
    slow."""
    import re
    from binascii import hexlify
    from time import time

    # get last component
    idx = name.rfind('/')
    if idx != -1:
        name = name[idx+1:]
    idx = name.rfind('\\')
    if idx != -1:
        name = name[idx+1:]

    # make it not a hidden file
    name = name.lstrip('.')

    # remove any weird characters; we don't escape them but rather
    # just pull them out
    name = re.sub(r'[^\w.]', '', name)

    s = hexlify(rand_bytes(8))
    return '-'.join((name, compact_date(time()), s))
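
# Editorial example (shape only; the exact value depends on the clock and the
# random bytes): gen_file_id('src/hello.c') returns something like
#
#   'hello.c-20051019091415-1234567890abcdef'
#
# i.e. the sanitised basename, a compact date stamp, and hex-encoded
# randomness.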


def gen_root_id():
    """Return a new tree-root file id."""
    return gen_file_id('TREE_ROOT')