        :return: The WorkingTree that contains 'path', and the rest of path
        """
        if path is None:
            path = osutils.getcwd()
        control, relpath = ControlDir.open_containing(path)
        return control.open_workingtree(), relpath
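
    # Hedged usage sketch for open_containing() (the import path and the
    # on-disk layout are assumptions for illustration, not part of this file):
    #
    #     from breezy.workingtree import WorkingTree
    #     tree, relpath = WorkingTree.open_containing('project/src/module.py')
    #     # 'tree' is the enclosing working tree; 'relpath' is the remainder
    #     # of the path relative to the tree root, e.g. 'src/module.py'.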

    def open_containing_paths(file_list, default_directory=None,
                              canonicalize=True, apply_view=True):
        """Open the WorkingTree that contains a set of paths.

        Fail if the paths given are not all in a single tree.

        This is used for the many command-line interfaces that take a list of
        any number of files and that require they all be in the same tree.
        """
        if default_directory is None:
            default_directory = u'.'
        # recommended replacement for builtins.internal_tree_files
        if file_list is None or len(file_list) == 0:
            tree = WorkingTree.open_containing(default_directory)[0]
            # XXX: doesn't really belong here, and seems to have the strange
            # side effect of making it return a bunch of files, not the whole
            # tree -- mbp 20100716
            if tree.supports_views() and apply_view:
                view_files = tree.views.lookup_view()
                if view_files:
                    file_list = view_files
                    view_str = views.view_display_str(view_files)
                    note(gettext("Ignoring files outside view. View is %s") % view_str)
            return tree, file_list
        if default_directory == u'.':
            seed = file_list[0]
        else:
            seed = default_directory
            file_list = [osutils.pathjoin(default_directory, f)
                         for f in file_list]
        tree = WorkingTree.open_containing(seed)[0]
        return tree, tree.safe_relpath_files(file_list, canonicalize,
                                             apply_view=apply_view)
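
    # Hedged example of how command-line code typically uses this helper
    # (the paths are illustrative; the API shown is the one defined above):
    #
    #     tree, relpaths = WorkingTree.open_containing_paths(
    #         ['src/a.py', 'src/b.py'])
    #     # relpaths are relative to tree.basedir; a path outside the tree
    #     # would raise errors.PathNotChild.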

    def safe_relpath_files(self, file_list, canonicalize=True, apply_view=True):
        """Convert file_list into a list of relpaths in tree.

        :param self: A tree to operate on.
        :param file_list: A list of user provided paths or None.
        :param apply_view: if True and a view is set, apply it or check that
            specified files are within it
        :return: A list of relative paths.
        :raises errors.PathNotChild: When a provided path is in a different self
            than self.
        """
        if file_list is None:
            return None
        if self.supports_views() and apply_view:
            view_files = self.views.lookup_view()
        else:
            view_files = []
        new_list = []
        # self.relpath exists as a "thunk" to osutils, but canonical_relpath
        # doesn't - fix that up here before we enter the loop.
        if canonicalize:
            def fixer(p):
                return osutils.canonical_relpath(self.basedir, p)
        else:
            fixer = self.relpath
        for filename in file_list:
            relpath = fixer(osutils.dereference_path(filename))
            if view_files and not osutils.is_inside_any(view_files, relpath):
                raise views.FileOutsideView(filename, view_files)
            new_list.append(relpath)
        return new_list

    def open_downlevel(path=None):
        """Open an unsupported working tree.

        Only intended for advanced situations like upgrading part of a controldir.
        """
        return WorkingTree.open(path, _unsupported=True)

    def __iter__(self):
        """Iterate through file_ids for this tree.

        file_ids are in a WorkingTree if they are in the working inventory
        and the working file exists.
        """
        inv = self._inventory
        for path, ie in inv.iter_entries():
            if bzrlib.osutils.lexists(self.abspath(path)):
                yield ie.file_id

    def __repr__(self):
        return "<%s of %s>" % (self.__class__.__name__,
                               getattr(self, 'basedir', None))

    def abspath(self, filename):
        return osutils.pathjoin(self.basedir, filename)

    def basis_tree(self):
        """Return RevisionTree for the current last revision.

        If the left most parent is a ghost then the returned tree will be an
        empty tree - one obtained by calling
        repository.revision_tree(NULL_REVISION).
        """
        try:
            revision_id = self.get_parent_ids()[0]
        except IndexError:
            # no parents, return an empty revision tree.
            # in the future this should return the tree for
            # 'empty:' - the implicit root empty tree.
            return self.branch.repository.revision_tree(
                _mod_revision.NULL_REVISION)
        try:
            return self.revision_tree(revision_id)
        except errors.NoSuchRevision:
            pass
        # No cached copy available, retrieve from the repository.
        # FIXME? RBC 20060403 should we cache the tree locally
        # at this point ?
        try:
            return self.branch.repository.revision_tree(revision_id)
        except (errors.RevisionNotPresent, errors.NoSuchRevision):
            # the basis tree *may* be a ghost or a low level error may have
            # occurred. If the revision is present, it's a problem, if it's not
            # it's a ghost.
            if self.branch.repository.has_revision(revision_id):
                raise
            # the basis tree is a ghost so return an empty tree.
            return self.branch.repository.revision_tree(
                _mod_revision.NULL_REVISION)

    @deprecated_method(zero_eight)
    def create(branch, directory):
        """Create a workingtree for branch at directory.

        If existing_directory already exists it must have a .bzr directory.
        If it does not exist, it will be created.

        This returns a new WorkingTree object for the new checkout.

        TODO FIXME RBC 20060124 when we have checkout formats in place this
        should accept an optional revisionid to checkout [and reject this if
        checking out into the same dir as a pre-checkout-aware branch format.]

        XXX: When BzrDir is present, these should be created through that
        interface instead.
        """
        warnings.warn('delete WorkingTree.create', stacklevel=3)
        transport = get_transport(directory)
        if branch.bzrdir.root_transport.base == transport.base:
            return branch.bzrdir.create_workingtree()
        # different directory,
        # create a branch reference
        # and now a working tree.
        raise NotImplementedError

    @deprecated_method(zero_eight)
    def create_standalone(directory):
        """Create a checkout and a branch and a repo at directory.

        Directory must exist and be empty.

        please use BzrDir.create_standalone_workingtree
        """
        return bzrdir.BzrDir.create_standalone_workingtree(directory)

    def relpath(self, path):
        """Return the local path portion from a given path.

        The path may be absolute or relative. If it's a relative path it is
        interpreted relative to the python current working directory.
        """
        return osutils.relpath(self.basedir, path)

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def get_file(self, path, filtered=True):
        return self.get_file_with_stat(path, filtered=filtered)[0]

    def get_file_with_stat(self, path, filtered=True,
                           _fstat=osutils.fstat):
        """See Tree.get_file_with_stat."""
        abspath = self.abspath(path)
        try:
            file_obj = open(abspath, 'rb')
        except EnvironmentError as e:
            if e.errno == errno.ENOENT:
                raise errors.NoSuchFile(path)
            raise
        stat_value = _fstat(file_obj.fileno())
        if filtered and self.supports_content_filtering():
            filters = self._content_filter_stack(path)
            if filters:
                file_obj, size = _mod_filters.filtered_input_file(
                    file_obj, filters)
                stat_value = _mod_filters.FilteredStat(
                    stat_value, st_size=size)
        return (file_obj, stat_value)

    def get_file_text(self, path, filtered=True):
        with self.get_file(path, filtered=filtered) as my_file:
            return my_file.read()

    def get_file_lines(self, path, filtered=True):
        """See Tree.get_file_lines()"""
        with self.get_file(path, filtered=filtered) as file:
            return file.readlines()
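
    # Hedged usage sketch for the file-content accessors above (the tree
    # object and the path are assumptions for illustration):
    #
    #     with tree.lock_read():
    #         data = tree.get_file_text('README')    # bytes, content-filtered
    #         lines = tree.get_file_lines('README')  # list of byte lines
    #     # get_file()/get_file_with_stat() return an open file object, so
    #     # callers are expected to close it (or use it as a context manager).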

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            parents = []
        else:
            parents = [last_rev]
        try:
            merges_bytes = self._transport.get_bytes('pending-merges')
        except errors.NoSuchFile:
            pass
        else:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip(b'\n')
                parents.append(revision_id)
        return parents

    def get_root_id(self):
        """Return the id of this trees root"""
        inv = self.read_working_inventory()
        return inv.root.file_id

    def _get_store_filename(self, file_id):
        ## XXX: badly named; this is not in the store at all
        return self.abspath(self.id2path(file_id))

    def clone(self, to_controldir, revision_id=None):
        """Duplicate this working tree into to_bzr, including all state.

        Specifically modified files are kept as modified, but
        ignored and unknown files are discarded.

        If you want to make a new line of development, see ControlDir.sprout().

        revision
            If not None, the cloned tree will have its last revision set to
            revision, and difference between the source trees last revision
            and this one merged in.
        """
        with self.lock_read():
            # assumes the target bzr dir format is compatible.
            result = to_controldir.create_workingtree()
            self.copy_content_into(result, revision_id)
            return result
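
    # Hedged sketch of clone(): duplicate this tree's state into another
    # control directory ('to_controldir' here is an illustrative object, not
    # something created by this module):
    #
    #     with source_tree.lock_read():
    #         new_tree = source_tree.clone(to_controldir)
    #     # modified files stay modified; ignored/unknown files are dropped.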

    def copy_content_into(self, tree, revision_id=None):
        """Copy the current content and user files of this tree into tree."""
        with self.lock_read():
            tree.set_root_id(self.path2id(''))
            if revision_id is None:
                merge.transform_tree(tree, self)
            else:
                # TODO now merge from tree.last_revision to revision (to
                # preserve user local changes)
                try:
                    other_tree = self.revision_tree(revision_id)
                except errors.NoSuchRevision:
                    other_tree = self.branch.repository.revision_tree(
                        revision_id)
                merge.transform_tree(tree, other_tree)
                if revision_id == _mod_revision.NULL_REVISION:
                    new_parents = []
                else:
                    new_parents = [revision_id]
                tree.set_parent_ids(new_parents)
449
def get_file_size(self, path):
450
"""See Tree.get_file_size"""
451
# XXX: this returns the on-disk size; it should probably return the
454
return os.path.getsize(self.abspath(path))
456
if e.errno != errno.ENOENT:
461
def _gather_kinds(self, files, kinds):
462
"""See MutableTree._gather_kinds."""
463
with self.lock_tree_write():
464
for pos, f in enumerate(files):
465
if kinds[pos] is None:
466
fullpath = osutils.normpath(self.abspath(f))
468
kinds[pos] = osutils.file_kind(fullpath)
470
if e.errno == errno.ENOENT:
471
raise errors.NoSuchFile(fullpath)
473
def add_parent_tree_id(self, revision_id, allow_leftmost_as_ghost=False):
474
"""Add revision_id as a parent.
476
This is equivalent to retrieving the current list of parent ids
477
and setting the list to its value plus revision_id.
479
:param revision_id: The revision id to add to the parent list. It may
480
            be a ghost revision as long as it's not the first parent to be
481
added, or the allow_leftmost_as_ghost parameter is set True.
482
:param allow_leftmost_as_ghost: Allow the first parent to be a ghost.
484
with self.lock_write():
485
parents = self.get_parent_ids() + [revision_id]
486
self.set_parent_ids(parents, allow_leftmost_as_ghost=len(parents) > 1
487
or allow_leftmost_as_ghost)
489
def add_parent_tree(self, parent_tuple, allow_leftmost_as_ghost=False):
490
"""Add revision_id, tree tuple as a parent.
492
This is equivalent to retrieving the current list of parent trees
493
and setting the list to its value plus parent_tuple. See also
494
add_parent_tree_id - if you only have a parent id available it will be
495
simpler to use that api. If you have the parent already available, using
496
this api is preferred.
498
:param parent_tuple: The (revision id, tree) to add to the parent list.
499
If the revision_id is a ghost, pass None for the tree.
500
:param allow_leftmost_as_ghost: Allow the first parent to be a ghost.
502
with self.lock_tree_write():
503
parent_ids = self.get_parent_ids() + [parent_tuple[0]]
504
if len(parent_ids) > 1:
505
                # the leftmost may have already been a ghost; preserve that if it was.
507
allow_leftmost_as_ghost = True
508
self.set_parent_ids(parent_ids,
509
allow_leftmost_as_ghost=allow_leftmost_as_ghost)
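
    # Hedged example: recording an extra (merged) parent; the revision id is
    # illustrative only. The call takes its own write lock as shown above:
    #
    #     tree.add_parent_tree_id(b'some-merged-revision-id')
    #     # equivalent to set_parent_ids(tree.get_parent_ids() + [rev_id])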
517
def commit(self, message=None, revprops=None, *args, **kwargs):
518
# avoid circular imports
519
from bzrlib.commit import Commit
522
if not 'branch-nick' in revprops:
523
revprops['branch-nick'] = self.branch.nick
524
# args for wt.commit start at message from the Commit.commit method,
525
# but with branch a kwarg now, passing in args as is results in the
526
        # message being used for the branch
527
args = (DEPRECATED_PARAMETER, message, ) + args
528
committed_id = Commit().commit( working_tree=self, revprops=revprops,
530
self._set_inventory(self.read_working_inventory())
533
def id2abspath(self, file_id):
534
return self.abspath(self.id2path(file_id))
536
def has_id(self, file_id):
537
# files that have been deleted are excluded
538
inv = self._inventory
539
if not inv.has_id(file_id):
541
path = inv.id2path(file_id)
542
return bzrlib.osutils.lexists(self.abspath(path))
544
def has_or_had_id(self, file_id):
545
if file_id == self.inventory.root.file_id:
547
return self.inventory.has_id(file_id)
549
__contains__ = has_id
551
def get_file_size(self, file_id):
552
return os.path.getsize(self.id2abspath(file_id))
555
def get_file_sha1(self, file_id, path=None):
557
path = self._inventory.id2path(file_id)
558
return self._hashcache.get_sha1(path)
560
def get_file_mtime(self, file_id, path=None):
562
path = self._inventory.id2path(file_id)
563
return os.lstat(self.abspath(path)).st_mtime
565
if not supports_executable():
566
def is_executable(self, file_id, path=None):
567
return self._inventory[file_id].executable
569
def is_executable(self, file_id, path=None):
571
path = self._inventory.id2path(file_id)
572
mode = os.lstat(self.abspath(path)).st_mode
573
return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
576
def add(self, files, ids=None):
577
"""Make files versioned.
579
Note that the command line normally calls smart_add instead,
580
which can automatically recurse.
582
This adds the files to the inventory, so that they will be
583
recorded by the next commit.
586
List of paths to add, relative to the base of the tree.
589
If set, use these instead of automatically generated ids.
590
Must be the same length as the list of files, but may
591
contain None for ids that are to be autogenerated.
593
TODO: Perhaps have an option to add the ids even if the files do
596
TODO: Perhaps callback with the ids and paths as they're added.
598
# TODO: Re-adding a file that is removed in the working copy
599
# should probably put it back with the previous ID.
600
if isinstance(files, basestring):
601
assert(ids is None or isinstance(ids, basestring))
607
ids = [None] * len(files)
609
assert(len(ids) == len(files))
611
inv = self.read_working_inventory()
612
for f,file_id in zip(files, ids):
613
if self.is_control_filename(f):
614
raise errors.ForbiddenControlFileError(filename=f)
619
raise BzrError("cannot add top-level %r" % f)
621
fullpath = normpath(self.abspath(f))
623
kind = file_kind(fullpath)
625
if e.errno == errno.ENOENT:
626
raise NoSuchFile(fullpath)
627
if not InventoryEntry.versionable_kind(kind):
628
raise errors.BadFileKindError(filename=f, kind=kind)
630
inv.add_path(f, kind=kind)
632
inv.add_path(f, kind=kind, file_id=file_id)
634
self._write_inventory(inv)
511
637
def add_pending_merge(self, *revision_ids):
512
with self.lock_tree_write():
513
# TODO: Perhaps should check at this point that the
514
# history of the revision is actually present?
515
parents = self.get_parent_ids()
517
for rev_id in revision_ids:
518
if rev_id in parents:
520
parents.append(rev_id)
523
self.set_parent_ids(parents, allow_leftmost_as_ghost=True)
525
def path_content_summary(self, path, _lstat=os.lstat,
526
_mapper=osutils.file_kind_from_stat_mode):
527
"""See Tree.path_content_summary."""
528
abspath = self.abspath(path)
638
# TODO: Perhaps should check at this point that the
639
# history of the revision is actually present?
640
p = self.pending_merges()
642
for rev_id in revision_ids:
648
self.set_pending_merges(p)
651
def pending_merges(self):
652
"""Return a list of pending merges.
654
These are revisions that have been merged into the working
655
directory but not yet committed.
530
stat_result = _lstat(abspath)
532
if getattr(e, 'errno', None) == errno.ENOENT:
534
return ('missing', None, None, None)
535
# propagate other errors
537
kind = _mapper(stat_result.st_mode)
539
return self._file_content_summary(path, stat_result)
540
elif kind == 'directory':
541
# perhaps it looks like a plain directory, but it's really a
543
if self._directory_is_tree_reference(path):
544
kind = 'tree-reference'
545
return kind, None, None, None
546
elif kind == 'symlink':
547
target = osutils.readlink(abspath)
548
return ('symlink', None, None, target)
550
return (kind, None, None, None)
552
def _file_content_summary(self, path, stat_result):
553
size = stat_result.st_size
554
executable = self._is_executable_from_path_and_stat(path, stat_result)
555
# try for a stat cache lookup
556
return ('file', size, executable, self._sha_from_stat(
559
def _check_parents_for_ghosts(self, revision_ids, allow_leftmost_as_ghost):
560
"""Common ghost checking functionality from set_parent_*.
562
This checks that the left hand-parent exists if there are any
565
if len(revision_ids) > 0:
566
leftmost_id = revision_ids[0]
567
if (not allow_leftmost_as_ghost and not
568
self.branch.repository.has_revision(leftmost_id)):
569
raise errors.GhostRevisionUnusableHere(leftmost_id)
571
def _set_merges_from_parent_ids(self, parent_ids):
572
merges = parent_ids[1:]
573
self._transport.put_bytes('pending-merges', b'\n'.join(merges),
574
mode=self.controldir._get_file_mode())
576
def _filter_parent_ids_by_ancestry(self, revision_ids):
577
"""Check that all merged revisions are proper 'heads'.
579
This will always return the first revision_id, and any merged revisions
582
if len(revision_ids) == 0:
584
graph = self.branch.repository.get_graph()
585
heads = graph.heads(revision_ids)
586
new_revision_ids = revision_ids[:1]
587
for revision_id in revision_ids[1:]:
588
if revision_id in heads and revision_id not in new_revision_ids:
589
new_revision_ids.append(revision_id)
590
if new_revision_ids != revision_ids:
591
mutter('requested to set revision_ids = %s,'
592
' but filtered to %s', revision_ids, new_revision_ids)
593
return new_revision_ids
595
def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
596
"""Set the parent ids to revision_ids.
598
See also set_parent_trees. This api will try to retrieve the tree data
599
for each element of revision_ids from the trees repository. If you have
600
tree data already available, it is more efficient to use
601
set_parent_trees rather than set_parent_ids. set_parent_ids is however
602
an easier API to use.
604
:param revision_ids: The revision_ids to set as the parent ids of this
605
working tree. Any of these may be ghosts.
607
with self.lock_tree_write():
608
self._check_parents_for_ghosts(revision_ids,
609
allow_leftmost_as_ghost=allow_leftmost_as_ghost)
610
for revision_id in revision_ids:
611
_mod_revision.check_not_reserved_id(revision_id)
613
revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)
615
if len(revision_ids) > 0:
616
self.set_last_revision(revision_ids[0])
618
self.set_last_revision(_mod_revision.NULL_REVISION)
620
self._set_merges_from_parent_ids(revision_ids)
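
    # Hedged example for set_parent_ids(): the first id becomes the basis,
    # the rest become pending merges (the ids below are illustrative):
    #
    #     tree.set_parent_ids([b'basis-rev-id', b'merged-rev-id'],
    #                         allow_leftmost_as_ghost=False)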
658
merges_file = self._control_files.get_utf8('pending-merges')
662
for l in merges_file.readlines():
663
p.append(l.rstrip('\n'))
622
667
def set_pending_merges(self, rev_list):
623
with self.lock_tree_write():
624
parents = self.get_parent_ids()
625
leftmost = parents[:1]
626
new_parents = leftmost + rev_list
627
self.set_parent_ids(new_parents)
668
self._control_files.put_utf8('pending-merges', '\n'.join(rev_list))
629
671
def set_merge_modified(self, modified_hashes):
630
"""Set the merge modified hashes."""
631
raise NotImplementedError(self.set_merge_modified)
633
def _sha_from_stat(self, path, stat_result):
634
"""Get a sha digest from the tree's stat cache.
636
The default implementation assumes no stat cache is present.
638
:param path: The path.
639
:param stat_result: The stat result being looked up.
643
def merge_from_branch(self, branch, to_revision=None, from_revision=None,
644
merge_type=None, force=False):
645
"""Merge from a branch into this working tree.
647
:param branch: The branch to merge from.
648
:param to_revision: If non-None, the merge will merge to to_revision,
649
but not beyond it. to_revision does not need to be in the history
650
of the branch when it is supplied. If None, to_revision defaults to
651
branch.last_revision().
653
from .merge import Merger, Merge3Merger
654
with self.lock_write():
655
merger = Merger(self.branch, this_tree=self)
656
# check that there are no local alterations
657
if not force and self.has_changes():
658
raise errors.UncommittedChanges(self)
659
if to_revision is None:
660
to_revision = _mod_revision.ensure_null(branch.last_revision())
661
merger.other_rev_id = to_revision
662
if _mod_revision.is_null(merger.other_rev_id):
663
raise errors.NoCommits(branch)
664
self.branch.fetch(branch, stop_revision=merger.other_rev_id)
665
merger.other_basis = merger.other_rev_id
666
merger.other_tree = self.branch.repository.revision_tree(
668
merger.other_branch = branch
669
if from_revision is None:
672
merger.set_base_revision(from_revision, branch)
673
if merger.base_rev_id == merger.other_rev_id:
674
raise errors.PointlessMerge
675
merger.backup_files = False
676
if merge_type is None:
677
merger.merge_type = Merge3Merger
679
merger.merge_type = merge_type
680
merger.set_interesting_files(None)
681
merger.show_base = False
682
merger.reprocess = False
683
conflicts = merger.do_merge()
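
    # Hedged usage sketch for merge_from_branch(); Branch.open and the
    # location are assumptions for illustration only:
    #
    #     other = Branch.open('../feature-branch')
    #     conflicts = tree.merge_from_branch(other)
    #     # per the code above, do_merge() reports conflicts; resolve them
    #     # before committing.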
673
for file_id, hash in modified_hashes.iteritems():
674
yield Stanza(file_id=file_id, hash=hash)
675
self._put_rio('merge-hashes', iter_stanzas(), MERGE_MODIFIED_HEADER_1)
678
def _put_rio(self, filename, stanzas, header):
679
my_file = rio_file(stanzas, header)
680
self._control_files.put(filename, my_file)
687
683
def merge_modified(self):
688
"""Return a dictionary of files modified by a merge.
690
The list is initialized by WorkingTree.set_merge_modified, which is
691
typically called after we make some automatic updates to the tree
694
This returns a map of file_id->sha1, containing only files which are
695
still in the working tree and have that text hash.
697
raise NotImplementedError(self.merge_modified)
699
def mkdir(self, path, file_id=None):
700
"""See MutableTree.mkdir()."""
702
if self.supports_setting_file_ids():
703
file_id = generate_ids.gen_file_id(os.path.basename(path))
685
hashfile = self._control_files.get('merge-hashes')
690
if hashfile.next() != MERGE_MODIFIED_HEADER_1 + '\n':
691
raise MergeModifiedFormatError()
692
except StopIteration:
693
raise MergeModifiedFormatError()
694
for s in RioReader(hashfile):
695
file_id = s.get("file_id")
696
if file_id not in self.inventory:
699
if hash == self.get_file_sha1(file_id):
700
merge_hashes[file_id] = hash
703
def get_symlink_target(self, file_id):
704
return os.readlink(self.id2abspath(file_id))
706
def file_class(self, filename):
707
if self.path2id(filename):
709
elif self.is_ignored(filename):
705
if not self.supports_setting_file_ids():
706
raise SettingFileIdUnsupported()
707
with self.lock_write():
708
os.mkdir(self.abspath(path))
709
self.add(path, file_id, 'directory')
712
def get_symlink_target(self, path):
713
abspath = self.abspath(path)
714
target = osutils.readlink(abspath)
717
def subsume(self, other_tree):
718
raise NotImplementedError(self.subsume)
720
def _directory_is_tree_reference(self, relpath):
721
raise NotImplementedError(self._directory_is_tree_reference)
723
def extract(self, path, format=None):
724
"""Extract a subtree from this tree.
726
A new branch will be created, relative to the path for this tree.
728
raise NotImplementedError(self.extract)
731
"""Write the in memory meta data to disk."""
732
raise NotImplementedError(self.flush)
734
def kind(self, relpath):
735
return osutils.file_kind(self.abspath(relpath))
737
def list_files(self, include_root=False, from_dir=None, recursive=True,
738
recurse_nested=False):
739
"""List all files as (path, class, kind, id, entry).
714
def list_files(self):
715
"""Recursively list all files as (path, class, kind, id, entry).
741
717
Lists, but does not descend into unversioned directories.
742
719
This does not include files that have been deleted in this
743
tree. Skips the control directory.
745
:param include_root: if True, return an entry for the root
746
:param from_dir: start from this directory or None for the root
747
:param recursive: whether to recurse into subdirectories or not
722
Skips the control directory.
749
raise NotImplementedError(self.list_files)
751
def move(self, from_paths, to_dir=None, after=False):
        """Rename files.

        to_dir must be known to the working tree.

        If to_dir exists and is a directory, the files are moved into
        it, keeping their old names.

        Note that to_dir is only the last component of the new name;
        this doesn't change the directory.

        For each entry in from_paths the move mode will be determined
        independently.

        The first mode moves the file in the filesystem and updates the
        working tree metadata. The second mode only updates the working tree
        metadata without touching the file on the filesystem.

        move uses the second mode if 'after == True' and the target is not
        versioned but present in the working tree.

        move uses the second mode if 'after == False' and the source is
        versioned but no longer in the working tree, and the target is not
        versioned but present in the working tree.

        move uses the first mode if 'after == False' and the source is
        versioned and present in the working tree, and the target is not
        versioned and not present in the working tree.

        Everything else results in an error.

        This returns a list of (from_path, to_path) pairs for each
        entry that is moved.
        """
        raise NotImplementedError(self.move)

    def copy_one(self, from_rel, to_rel):
        """Copy a file in the tree to a new location.

        This default implementation just copies the file, then
        adds the target.

        :param from_rel: From location (relative to tree root)
        :param to_rel: Target location (relative to tree root)
        """
        shutil.copyfile(self.abspath(from_rel), self.abspath(to_rel))
        self.add(to_rel)
def rename_one(self, from_rel, to_rel):
879
This can change the directory or the filename or both.
796
shutil.copyfile(self.abspath(from_rel), self.abspath(to_rel))
882
if not self.has_filename(from_rel):
883
raise BzrError("can't rename: old working file %r does not exist" % from_rel)
884
if self.has_filename(to_rel):
885
raise BzrError("can't rename: new working file %r already exists" % to_rel)
887
file_id = inv.path2id(from_rel)
889
raise BzrError("can't rename: old name %r is not versioned" % from_rel)
892
from_parent = entry.parent_id
893
from_name = entry.name
895
if inv.path2id(to_rel):
896
raise BzrError("can't rename: new name %r is already versioned" % to_rel)
898
to_dir, to_tail = os.path.split(to_rel)
899
to_dir_id = inv.path2id(to_dir)
900
if to_dir_id == None and to_dir != '':
901
raise BzrError("can't determine destination directory id for %r" % to_dir)
903
mutter("rename_one:")
904
mutter(" file_id {%s}" % file_id)
905
mutter(" from_rel %r" % from_rel)
906
mutter(" to_rel %r" % to_rel)
907
mutter(" to_dir %r" % to_dir)
908
mutter(" to_dir_id {%s}" % to_dir_id)
910
inv.rename(file_id, to_dir_id, to_tail)
912
from_abs = self.abspath(from_rel)
913
to_abs = self.abspath(to_rel)
915
rename(from_abs, to_abs)
917
inv.rename(file_id, from_parent, from_name)
918
raise BzrError("failed to rename %r to %r: %s"
919
% (from_abs, to_abs, e[1]),
920
["rename rolled back"])
921
self._write_inventory(inv)
799
924
def unknowns(self):
800
925
"""Return all unknown files.
802
927
These are files in the working directory that are not versioned or
803
928
control files or ignored.
805
with self.lock_read():
806
# force the extras method to be fully executed before returning, to
807
# prevent race conditions with the lock
809
[subp for subp in self.extras() if not self.is_ignored(subp)])
811
def unversion(self, paths):
812
"""Remove the path in pahs from the current versioned set.
814
When a path is unversioned, all of its children are automatically
817
:param paths: The paths to stop versioning.
818
:raises NoSuchFile: if any path is not currently versioned.
820
raise NotImplementedError(self.unversion)
822
def pull(self, source, overwrite=False, stop_revision=None,
823
change_reporter=None, possible_transports=None, local=False,
824
show_base=False, tag_selector=None):
825
with self.lock_write(), source.lock_read():
826
old_revision_info = self.branch.last_revision_info()
930
for subp in self.extras():
931
if not self.is_ignored(subp):
934
@deprecated_method(zero_eight)
935
def iter_conflicts(self):
936
"""List all files in the tree that have text or content conflicts.
937
DEPRECATED. Use conflicts instead."""
938
return self._iter_conflicts()
940
def _iter_conflicts(self):
942
for info in self.list_files():
944
stem = get_conflicted_stem(path)
947
if stem not in conflicted:
952
def pull(self, source, overwrite=False, stop_revision=None):
953
top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
956
pp = ProgressPhase("Pull phase", 2, top_pb)
958
old_revision_history = self.branch.revision_history()
827
959
basis_tree = self.basis_tree()
828
count = self.branch.pull(source, overwrite, stop_revision,
829
possible_transports=possible_transports,
830
local=local, tag_selector=tag_selector)
831
new_revision_info = self.branch.last_revision_info()
832
if new_revision_info != old_revision_info:
960
count = self.branch.pull(source, overwrite, stop_revision)
961
new_revision_history = self.branch.revision_history()
962
if new_revision_history != old_revision_history:
964
if len(old_revision_history):
965
other_revision = old_revision_history[-1]
967
other_revision = None
833
968
repository = self.branch.repository
834
if repository._format.fast_deltas:
835
parent_ids = self.get_parent_ids()
837
basis_id = parent_ids[0]
838
basis_tree = repository.revision_tree(basis_id)
839
with basis_tree.lock_read():
840
new_basis_tree = self.branch.basis_tree()
846
change_reporter=change_reporter,
848
basis_root_id = basis_tree.path2id('')
849
new_root_id = new_basis_tree.path2id('')
850
if new_root_id is not None and basis_root_id != new_root_id:
851
self.set_root_id(new_root_id)
852
# TODO - dedup parents list with things merged by pull ?
853
# reuse the revisiontree we merged against to set the new
856
if self.branch.last_revision() != _mod_revision.NULL_REVISION:
858
(self.branch.last_revision(), new_basis_tree))
859
# we have to pull the merge trees out again, because
860
# merge_inner has set the ids. - this corner is not yet
861
# layered well enough to prevent double handling.
862
# XXX TODO: Fix the double handling: telling the tree about
863
# the already known parent data is wasteful.
864
merges = self.get_parent_ids()[1:]
865
parent_trees.extend([
866
(parent, repository.revision_tree(parent)) for
868
self.set_parent_trees(parent_trees)
969
pb = bzrlib.ui.ui_factory.nested_progress_bar()
971
merge_inner(self.branch,
972
self.branch.basis_tree(),
978
self.set_last_revision(self.branch.last_revision())
871
    def put_file_bytes_non_atomic(self, path, bytes):
        """See MutableTree.put_file_bytes_non_atomic."""
        with self.lock_write(), open(self.abspath(path), 'wb') as stream:
            stream.write(bytes)

    def extras(self):
        """Yield all unversioned files in this WorkingTree.

        If there are any unversioned directories and the file format
        supports versioning directories, then only the directory is returned,
        not all its children. But if there are unversioned files under a
        versioned subdirectory, they are returned.

        Currently returned depth-first, sorted by name within directories.
        This is the same order used by 'osutils.walkdirs'.
        """
        raise NotImplementedError(self.extras)
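
    # Hedged sketch of how extras() is typically consumed (see also
    # ignored_files() and is_ignored() defined later in this class):
    #
    #     with tree.lock_read():
    #         unknown = [p for p in tree.extras() if not tree.is_ignored(p)]
    #     # this is essentially what unknowns() does, minus the ignored paths.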
993
## TODO: Work from given directory downwards
994
for path, dir_entry in self.inventory.directories():
995
mutter("search for unknowns in %r", path)
996
dirabs = self.abspath(path)
997
if not isdir(dirabs):
998
# e.g. directory deleted
1002
for subf in os.listdir(dirabs):
1004
and (subf not in dir_entry.children)):
1009
subp = pathjoin(path, subf)
1012
def _translate_ignore_rule(self, rule):
1013
"""Translate a single ignore rule to a regex.
1015
There are two types of ignore rules. Those that do not contain a / are
1016
matched against the tail of the filename (that is, they do not care
1017
what directory the file is in.) Rules which do contain a slash must
1018
match the entire path. As a special case, './' at the start of the
1019
string counts as a slash in the string but is removed before matching
1020
(e.g. ./foo.c, ./src/foo.c)
1022
:return: The translated regex.
1024
if rule[:2] in ('./', '.\\'):
1026
result = fnmatch.translate(rule[2:])
1027
elif '/' in rule or '\\' in rule:
1029
result = fnmatch.translate(rule)
1031
# default rule style.
1032
result = "(?:.*/)?(?!.*/)" + fnmatch.translate(rule)
1033
assert result[-1] == '$', "fnmatch.translate did not add the expected $"
1034
return "(" + result + ")"
1036
def _combine_ignore_rules(self, rules):
1037
"""Combine a list of ignore rules into a single regex object.
1039
Each individual rule is combined with | to form a big regex, which then
1040
has $ added to it to form something like ()|()|()$. The group index for
1041
each subregex's outermost group is placed in a dictionary mapping back
1042
to the rule. This allows quick identification of the matching rule that
1044
:return: a list of the compiled regex and the matching-group index
1045
dictionaries. We return a list because python complains if you try to
1046
combine more than 100 regexes.
1051
translated_rules = []
1053
translated_rule = self._translate_ignore_rule(rule)
1054
compiled_rule = re.compile(translated_rule)
1055
groups[next_group] = rule
1056
next_group += compiled_rule.groups
1057
translated_rules.append(translated_rule)
1058
if next_group == 99:
1059
result.append((re.compile("|".join(translated_rules)), groups))
1062
translated_rules = []
1063
if len(translated_rules):
1064
result.append((re.compile("|".join(translated_rules)), groups))
889
1067
def ignored_files(self):
890
1068
"""Yield list of PATH, IGNORE_PATTERN"""
891
1069
for subp in self.extras():
892
1070
pat = self.is_ignored(subp)
1074
def get_ignore_list(self):
1075
"""Return list of ignore patterns.
1077
Cached in the Tree object after the first call.
1079
if hasattr(self, '_ignorelist'):
1080
return self._ignorelist
1082
l = bzrlib.DEFAULT_IGNORE[:]
1083
if self.has_filename(bzrlib.IGNORE_FILENAME):
1084
f = self.get_file_byname(bzrlib.IGNORE_FILENAME)
1085
l.extend([line.rstrip("\n\r").decode('utf-8')
1086
for line in f.readlines()])
1087
self._ignorelist = l
1088
self._ignore_regex = self._combine_ignore_rules(l)
1091
def _get_ignore_rules_as_regex(self):
1092
"""Return a regex of the ignore rules and a mapping dict.
1094
:return: (ignore rules compiled regex, dictionary mapping rule group
1095
indices to original rule.)
1097
if getattr(self, '_ignorelist', None) is None:
1098
self.get_ignore_list()
1099
return self._ignore_regex
896
1101
def is_ignored(self, filename):
897
1102
r"""Check whether the filename matches an ignore pattern.
899
raise NotImplementedError(self.is_ignored)
901
def stored_kind(self, path):
902
"""See Tree.stored_kind"""
903
raise NotImplementedError(self.stored_kind)
905
def _comparison_data(self, entry, path):
906
abspath = self.abspath(path)
908
stat_value = os.lstat(abspath)
910
if getattr(e, 'errno', None) == errno.ENOENT:
917
mode = stat_value.st_mode
918
kind = osutils.file_kind_from_stat_mode(mode)
919
if not self._supports_executable():
920
executable = entry is not None and entry.executable
922
executable = bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
923
return kind, executable, stat_value
1104
Patterns containing '/' or '\' need to match the whole path;
1105
others match against only the last component.
1107
If the file is ignored, returns the pattern which caused it to
1108
be ignored, otherwise None. So this can simply be used as a
1109
boolean if desired."""
1111
# TODO: Use '**' to match directories, and other extended
1112
# globbing stuff from cvs/rsync.
1114
# XXX: fnmatch is actually not quite what we want: it's only
1115
# approximately the same as real Unix fnmatch, and doesn't
1116
# treat dotfiles correctly and allows * to match /.
1117
# Eventually it should be replaced with something more
1120
rules = self._get_ignore_rules_as_regex()
1121
for regex, mapping in rules:
1122
match = regex.match(filename)
1123
if match is not None:
1124
# one or more of the groups in mapping will have a non-None group
1126
groups = match.groups()
1127
rules = [mapping[group] for group in
1128
mapping if groups[group] is not None]
1132
def kind(self, file_id):
1133
return file_kind(self.id2abspath(file_id))
925
1136
def last_revision(self):
926
"""Return the last revision of the branch for this tree.
928
This format tree does not support a separate marker for last-revision
929
compared to the branch.
931
See MutableTree.last_revision
1137
"""Return the last revision id of this working tree.
1139
In early branch formats this was == the branch last_revision,
1140
but that cannot be relied upon - for working tree operations,
1141
always use tree.last_revision().
933
return self._last_revision()
935
def _last_revision(self):
936
"""helper for get_parent_ids."""
937
with self.lock_read():
938
return _mod_revision.ensure_null(self.branch.last_revision())
1143
return self.branch.last_revision()
940
1145
def is_locked(self):
941
"""Check if this tree is locked."""
942
raise NotImplementedError(self.is_locked)
1146
return self._control_files.is_locked()
944
1148
def lock_read(self):
945
"""Lock the tree for reading.
947
This also locks the branch, and can be unlocked via self.unlock().
949
:return: A breezy.lock.LogicalLockResult.
951
raise NotImplementedError(self.lock_read)
953
def lock_tree_write(self):
954
"""See MutableTree.lock_tree_write, and WorkingTree.unlock.
956
:return: A breezy.lock.LogicalLockResult.
958
raise NotImplementedError(self.lock_tree_write)
1149
"""See Branch.lock_read, and WorkingTree.unlock."""
1150
self.branch.lock_read()
1152
return self._control_files.lock_read()
1154
self.branch.unlock()
960
1157
def lock_write(self):
961
"""See MutableTree.lock_write, and WorkingTree.unlock.
963
:return: A breezy.lock.LogicalLockResult.
965
raise NotImplementedError(self.lock_write)
1158
"""See Branch.lock_write, and WorkingTree.unlock."""
1159
self.branch.lock_write()
1161
return self._control_files.lock_write()
1163
self.branch.unlock()
967
1166
def get_physical_lock_status(self):
968
raise NotImplementedError(self.get_physical_lock_status)
1167
return self._control_files.get_physical_lock_status()
1169
def _basis_inventory_name(self):
1170
return 'basis-inventory'
970
1173
def set_last_revision(self, new_revision):
971
1174
"""Change the last revision in the working tree."""
972
raise NotImplementedError(self.set_last_revision)
1175
if self._change_last_revision(new_revision):
1176
self._cache_basis_inventory(new_revision)
974
1178
def _change_last_revision(self, new_revision):
975
1179
"""Template method part of set_last_revision to perform the change.
977
1181
This is used to allow WorkingTree3 instances to not affect branch
978
1182
when their last revision is set.
980
if _mod_revision.is_null(new_revision):
981
self.branch.set_last_revision_info(0, new_revision)
1184
if new_revision is None:
1185
self.branch.set_revision_history([])
983
_mod_revision.check_not_reserved_id(new_revision)
1187
# current format is locked in with the branch
1188
revision_history = self.branch.revision_history()
985
self.branch.generate_revision_history(new_revision)
986
except errors.NoSuchRevision:
987
                # not present in the repo - don't try to set it deeper than the tip
988
self.branch._set_revision_history([new_revision])
1190
position = revision_history.index(new_revision)
1192
raise errors.NoSuchRevision(self.branch, new_revision)
1193
self.branch.set_revision_history(revision_history[:position + 1])
991
def remove(self, files, verbose=False, to_file=None, keep_files=True,
993
"""Remove nominated files from the working tree metadata.
995
:files: File paths relative to the basedir.
996
:keep_files: If true, the files will also be kept.
997
:force: Delete files and directories, even if they are changed and
998
even if the directories are not empty.
1000
raise NotImplementedError(self.remove)
1002
def revert(self, filenames=None, old_tree=None, backups=True,
1003
pb=None, report_changes=False):
1004
from .conflicts import resolve
1005
with cleanup.ExitStack() as exit_stack:
1006
exit_stack.enter_context(self.lock_tree_write())
1007
if old_tree is None:
1008
basis_tree = self.basis_tree()
1009
exit_stack.enter_context(basis_tree.lock_read())
1010
old_tree = basis_tree
1013
conflicts = transform.revert(self, old_tree, filenames, backups, pb,
1015
if filenames is None and len(self.get_parent_ids()) > 1:
1017
last_revision = self.last_revision()
1018
if last_revision != _mod_revision.NULL_REVISION:
1019
if basis_tree is None:
1020
basis_tree = self.basis_tree()
1021
exit_stack.enter_context(basis_tree.lock_read())
1022
parent_trees.append((last_revision, basis_tree))
1023
self.set_parent_trees(parent_trees)
1026
resolve(self, filenames, ignore_misses=True, recursive=True)
1029
def store_uncommitted(self):
1030
"""Store uncommitted changes from the tree in the branch."""
1031
raise NotImplementedError(self.store_uncommitted)
1033
def restore_uncommitted(self):
1034
"""Restore uncommitted changes from the branch into the tree."""
1035
raise NotImplementedError(self.restore_uncommitted)
1037
def revision_tree(self, revision_id):
1038
"""See Tree.revision_tree.
1040
For trees that can be obtained from the working tree, this
1041
will do so. For other trees, it will fall back to the repository.
1043
raise NotImplementedError(self.revision_tree)
1196
def _cache_basis_inventory(self, new_revision):
1197
"""Cache new_revision as the basis inventory."""
1198
# TODO: this should allow the ready-to-use inventory to be passed in,
1199
# as commit already has that ready-to-use [while the format is the
1202
# this double handles the inventory - unpack and repack -
1203
# but is easier to understand. We can/should put a conditional
1204
# in here based on whether the inventory is in the latest format
1205
# - perhaps we should repack all inventories on a repository
1207
# the fast path is to copy the raw xml from the repository. If the
1208
# xml contains 'revision_id="', then we assume the right
1209
# revision_id is set. We must check for this full string, because a
1210
# root node id can legitimately look like 'revision_id' but cannot
1212
xml = self.branch.repository.get_inventory_xml(new_revision)
1213
if not 'revision_id="' in xml.split('\n', 1)[0]:
1214
inv = self.branch.repository.deserialise_inventory(
1216
inv.revision_id = new_revision
1217
xml = bzrlib.xml5.serializer_v5.write_inventory_to_string(inv)
1218
assert isinstance(xml, str), 'serialised xml must be bytestring.'
1219
path = self._basis_inventory_name()
1221
self._control_files.put(path, sio)
1222
except WeaveRevisionNotPresent:
1225
def read_basis_inventory(self):
1226
"""Read the cached basis inventory."""
1227
path = self._basis_inventory_name()
1228
return self._control_files.get(path).read()
1231
def read_working_inventory(self):
1232
"""Read the working inventory."""
1233
# ElementTree does its own conversion from UTF-8, so open in
1235
result = bzrlib.xml5.serializer_v5.read_inventory(
1236
self._control_files.get('inventory'))
1237
self._set_inventory(result)
1241
def remove(self, files, verbose=False, to_file=None):
1242
"""Remove nominated files from the working inventory..
1244
This does not remove their text. This does not run on XXX on what? RBC
1246
TODO: Refuse to remove modified files unless --force is given?
1248
TODO: Do something useful with directories.
1250
TODO: Should this remove the text or not? Tough call; not
1251
        removing may be useful and the user can just use rm, and
1252
is the opposite of add. Removing it is consistent with most
1253
other tools. Maybe an option.
1255
## TODO: Normalize names
1256
## TODO: Remove nested loops; better scalability
1257
if isinstance(files, basestring):
1260
inv = self.inventory
1262
# do this before any modifications
1264
fid = inv.path2id(f)
1266
# TODO: Perhaps make this just a warning, and continue?
1267
# This tends to happen when
1268
raise NotVersionedError(path=f)
1270
# having remove it, it must be either ignored or unknown
1271
if self.is_ignored(f):
1275
show_status(new_status, inv[fid].kind, f, to_file=to_file)
1278
self._write_inventory(inv)
1281
def revert(self, filenames, old_tree=None, backups=True,
1282
pb=DummyProgress()):
1283
from transform import revert
1284
from conflicts import resolve
1285
if old_tree is None:
1286
old_tree = self.basis_tree()
1287
conflicts = revert(self, old_tree, filenames, backups, pb)
1288
if not len(filenames):
1289
self.set_pending_merges([])
1292
resolve(self, filenames, ignore_misses=True)
1295
# XXX: This method should be deprecated in favour of taking in a proper
1296
# new Inventory object.
1298
def set_inventory(self, new_inventory_list):
1299
from bzrlib.inventory import (Inventory,
1304
inv = Inventory(self.get_root_id())
1305
for path, file_id, parent, kind in new_inventory_list:
1306
name = os.path.basename(path)
1309
            # fixme, there should be a factory function inv.add_??
1310
if kind == 'directory':
1311
inv.add(InventoryDirectory(file_id, name, parent))
1312
elif kind == 'file':
1313
inv.add(InventoryFile(file_id, name, parent))
1314
elif kind == 'symlink':
1315
inv.add(InventoryLink(file_id, name, parent))
1317
raise BzrError("unknown kind %r" % kind)
1318
self._write_inventory(inv)
1045
1321
def set_root_id(self, file_id):
1046
1322
"""Set the root id for this tree."""
1047
if not self.supports_setting_file_ids():
1048
raise SettingFileIdUnsupported()
1049
with self.lock_tree_write():
1053
'WorkingTree.set_root_id with fileid=None')
1054
file_id = osutils.safe_file_id(file_id)
1055
self._set_root_id(file_id)
1057
def _set_root_id(self, file_id):
1058
"""Set the root id for this tree, in a format specific manner.
1060
:param file_id: The file id to assign to the root. It must not be
1061
present in the current inventory or an error will occur. It must
1062
not be None, but rather a valid file id.
1064
raise NotImplementedError(self._set_root_id)
1323
inv = self.read_working_inventory()
1324
orig_root_id = inv.root.file_id
1325
del inv._byid[inv.root.file_id]
1326
inv.root.file_id = file_id
1327
inv._byid[inv.root.file_id] = inv.root
1330
if entry.parent_id == orig_root_id:
1331
entry.parent_id = inv.root.file_id
1332
self._write_inventory(inv)
1066
1334
    def unlock(self):
        """See Branch.unlock.

        WorkingTree locking just uses the Branch locking facilities.
        This is current because all working trees have an embedded branch
        within them. If in the future, we were to make branch data shareable
        between multiple working trees, i.e. via shared storage, then we
        would probably want to lock both the local tree, and the branch.
        """
        raise NotImplementedError(self.unlock)
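
    # Hedged example of the lock/unlock pairing this docstring describes
    # (lock_read() is defined earlier; concrete tree classes implement both):
    #
    #     lock = tree.lock_read()
    #     try:
    #         pass  # read-only work against the tree
    #     finally:
    #         tree.unlock()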
1079
def update(self, change_reporter=None, possible_transports=None,
1080
revision=None, old_tip=_marker, show_base=False):
1343
# FIXME: We want to write out the hashcache only when the last lock on
1344
# this working copy is released. Peeking at the lock count is a bit
1345
# of a nasty hack; probably it's better to have a transaction object,
1346
# which can do some finalization when it's either successfully or
1347
# unsuccessfully completed. (Denys's original patch did that.)
1348
# RBC 20060206 hooking into transaction will couple lock and transaction
1349
# wrongly. Hooking into unlock on the control files object is fine though.
1351
# TODO: split this per format so there is no ugly if block
1352
if self._hashcache.needs_write and (
1353
# dedicated lock files
1354
self._control_files._lock_count==1 or
1356
(self._control_files is self.branch.control_files and
1357
self._control_files._lock_count==3)):
1358
self._hashcache.write()
1359
# reverse order of locking.
1361
return self._control_files.unlock()
1363
self.branch.unlock()
1081
1367
"""Update a working tree along its branch.
1083
        This will update the branch if it's bound too, which means we have
1084
multiple trees involved:
1086
- The new basis tree of the master.
1087
- The old basis tree of the branch.
1088
- The old basis tree of the working tree.
1089
- The current working tree state.
1091
Pathologically, all three may be different, and non-ancestors of each
1092
other. Conceptually we want to:
1094
- Preserve the wt.basis->wt.state changes
1095
- Transform the wt.basis to the new master basis.
1096
- Apply a merge of the old branch basis to get any 'local' changes from
1098
- Restore the wt.basis->wt.state changes.
1100
1381
There isn't a single operation at the moment to do that, so we:
1102
- Merge current state -> basis tree of the master w.r.t. the old tree
1104
- Do a 'normal' merge of the old branch basis if it is relevant.
1106
:param revision: The target revision to update to. Must be in the
1108
:param old_tip: If branch.update() has already been run, the value it
1109
returned (old tip of the branch or None). _marker is used
1112
if self.branch.get_bound_location() is not None:
1114
update_branch = (old_tip is self._marker)
1116
self.lock_tree_write()
1117
update_branch = False
1385
old_tip = self.branch.update()
1386
if old_tip is not None:
1387
self.add_pending_merge(old_tip)
1388
self.branch.lock_read()
1120
old_tip = self.branch.update(possible_transports)
1122
if old_tip is self._marker:
1124
return self._update_tree(old_tip, change_reporter, revision, show_base)
1128
def _update_tree(self, old_tip=None, change_reporter=None, revision=None,
1130
"""Update a tree to the master branch.
1132
:param old_tip: if supplied, the previous tip revision the branch,
1133
before it was changed to the master branch's tip.
        # here if old_tip is not None, it is the old tip of the branch before
        # it was updated from the master branch. This should become a pending
        # merge in the working tree to preserve the user's existing work. We
        # can't set that until we update the working tree's last revision to
        # be one from the new branch, because it will just get absorbed by
        # the parent de-duplication logic.
        #
        # We MUST save it even if an error occurs, because otherwise the
        # user's local work is unreferenced and will appear to have been
        # lost.
        #
        with self.lock_tree_write():
            nb_conflicts = 0
            try:
                last_rev = self.get_parent_ids()[0]
            except IndexError:
                last_rev = _mod_revision.NULL_REVISION
            if revision is None:
                revision = self.branch.last_revision()

            old_tip = old_tip or _mod_revision.NULL_REVISION
            if not _mod_revision.is_null(old_tip) and old_tip != last_rev:
                # the branch we are bound to was updated
                # merge those changes in first
                base_tree = self.basis_tree()
                other_tree = self.branch.repository.revision_tree(old_tip)
                nb_conflicts = merge.merge_inner(self.branch, other_tree,
                                                 base_tree, this_tree=self,
                                                 change_reporter=change_reporter,
                                                 show_base=show_base)
                if nb_conflicts:
                    self.add_parent_tree((old_tip, other_tree))
                    note(gettext('Rerun update after fixing the conflicts.'))
                    return nb_conflicts
            if last_rev != _mod_revision.ensure_null(revision):
                # the working tree is up to date with the branch
                # we can merge the specified revision from master
                to_tree = self.branch.repository.revision_tree(revision)
                to_root_id = to_tree.path2id('')
                basis = self.basis_tree()
                with basis.lock_read():
                    if (basis.path2id('') is None or
                            basis.path2id('') != to_root_id):
                        self.set_root_id(to_root_id)

                # determine the branch point
                graph = self.branch.repository.get_graph()
                base_rev_id = graph.find_unique_lca(self.branch.last_revision(),
                                                    last_rev)
                base_tree = self.branch.repository.revision_tree(base_rev_id)
                nb_conflicts = merge.merge_inner(self.branch, to_tree, base_tree,
                                                 this_tree=self,
                                                 change_reporter=change_reporter,
                                                 show_base=show_base)
                self.set_last_revision(revision)
                # TODO - dedup parents list with things merged by pull ?
                # reuse the tree we've updated to to set the basis:
                parent_trees = [(revision, to_tree)]
                merges = self.get_parent_ids()[1:]
                # Ideally we ask the tree for the trees here, that way the working
                # tree can decide whether to give us the entire tree or give us a
                # lazy initialised tree. dirstate for instance will have the trees
                # in ram already, whereas a last-revision + basis-inventory tree
                # will not, but also does not need them when setting parents.
                for parent in merges:
                    parent_trees.append(
                        (parent, self.branch.repository.revision_tree(parent)))
                if not _mod_revision.is_null(old_tip):
                    parent_trees.append(
                        (old_tip, self.branch.repository.revision_tree(old_tip)))
                self.set_parent_trees(parent_trees)
                last_rev = parent_trees[0][0]
            return nb_conflicts
    def _write_inventory(self, inv):
        """Write inventory as the current inventory."""
        sio = StringIO()
        bzrlib.xml5.serializer_v5.write_inventory(inv, sio)
        sio.seek(0)
        self._control_files.put('inventory', sio)
        self._set_inventory(inv)
        mutter('wrote working inventory')
    def set_conflicts(self, arg):
        raise errors.UnsupportedOperation(self.set_conflicts, self)

    def add_conflicts(self, arg):
        raise errors.UnsupportedOperation(self.add_conflicts, self)

    def conflicts(self):
        raise NotImplementedError(self.conflicts)
    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
                ((current_directory_path, fileid),
                 [(file1_path, file1_name, file1_kind, (lstat), file1_id,
                   file1_kind), ... ])

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
        raise NotImplementedError(self.walkdirs)
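    # Illustrative sketch (not part of the original source): consuming the
    # walkdirs() generator on a concrete WorkingTree.  The whole iteration
    # must happen inside a single lock, as documented above.
    #
    #   tree, _ = WorkingTree.open_containing('.')
    #   with tree.lock_read():
    #       for (dir_path, dir_file_id), entries in tree.walkdirs():
    #           for path, name, kind, stat_value, file_id, versioned_kind in entries:
    #               print(path, kind)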
    @deprecated_method(deprecated_in((3, 0, 1)))
    def auto_resolve(self):
        """Automatically resolve text conflicts according to contents.

        Only text conflicts are auto_resolvable. Files with no conflict markers
        are considered 'resolved', because bzr always puts conflict markers
        into files that have text conflicts. The corresponding .THIS .BASE and
        .OTHER files are deleted, as per 'resolve'.

        :return: a tuple of ConflictLists: (un_resolved, resolved).
        """
        with self.lock_tree_write():
            un_resolved = _mod_conflicts.ConflictList()
            resolved = _mod_conflicts.ConflictList()
            for conflict in self.conflicts():
                try:
                    conflict.action_auto(self)
                except NotImplementedError:
                    un_resolved.append(conflict)
                else:
                    conflict.cleanup(self)
                    resolved.append(conflict)
            self.set_conflicts(un_resolved)
            return un_resolved, resolved
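    # Illustrative sketch (not part of the original source): auto_resolve()
    # is deprecated, but its return value separates the conflicts it could
    # not handle from those it cleaned up.
    #
    #   tree, _ = WorkingTree.open_containing('.')
    #   un_resolved, resolved = tree.auto_resolve()
    #   for conflict in resolved:
    #       print('resolved:', conflict)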
    def _validate(self):
        """Validate internal structures.

        This is meant mostly for the test suite, to give it a chance to detect
        corruption after actions have occurred. The default implementation is a
        no-op.

        :return: None. An exception should be raised if there is an error.
        """
        return
    def check_state(self):
        """Check that the working state is/isn't valid."""
        raise NotImplementedError(self.check_state)

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        fix if something got corrupted (like the .bzr/checkout/dirstate file).
        """
        raise NotImplementedError(self.reset_state)
    def _get_rules_searcher(self, default_searcher):
        """See Tree._get_rules_searcher."""
        if self._rules_searcher is None:
            self._rules_searcher = super(WorkingTree,
                                         self)._get_rules_searcher(default_searcher)
        return self._rules_searcher

    def get_shelf_manager(self):
        """Return the ShelfManager for this WorkingTree."""
        raise NotImplementedError(self.get_shelf_manager)
    def get_canonical_paths(self, paths):
        """Like get_canonical_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        with self.lock_read():
            for path in paths:
                yield path
    def get_canonical_path(self, path):
        """Returns the first item in the tree that matches a path.

        This is meant to allow case-insensitive path lookups on e.g.
        case-insensitive filesystems.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches according to the underlying file system,
        it is implementation defined which is returned.

        If no path matches according to the file system, the input path is
        returned, but with as many path entries that do exist changed to their
        canonical form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        with self.lock_read():
            return next(self.get_canonical_paths([path]))
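    # Illustrative sketch (not part of the original source): resolving the
    # on-disk casing of user-supplied paths on a case-insensitive filesystem.
    # The file names are examples only.
    #
    #   tree, _ = WorkingTree.open_containing('.')
    #   print(tree.get_canonical_path('readme.TXT'))
    #   print(list(tree.get_canonical_paths(['a.C', 'B.h'])))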
    def reference_parent(self, path, branch=None, possible_transports=None):
        raise errors.UnsupportedOperation(self.reference_parent, self)

    def get_reference_info(self, path, branch=None):
        raise errors.UnsupportedOperation(self.get_reference_info, self)

    def set_reference_info(self, tree_path, branch_location):
        raise errors.UnsupportedOperation(self.set_reference_info, self)
class WorkingTreeFormatRegistry(ControlComponentFormatRegistry):
    """Registry for working tree formats."""

    def __init__(self, other_registry=None):
        super(WorkingTreeFormatRegistry, self).__init__(other_registry)
        self._default_format = None
        self._default_format_key = None

    def get_default(self):
        """Return the current default format."""
        if (self._default_format_key is not None and
                self._default_format is None):
            self._default_format = self.get(self._default_format_key)
        return self._default_format

    def set_default(self, format):
        """Set the default format."""
        self._default_format = format
        self._default_format_key = None

    def set_default_key(self, format_string):
        """Set the default format by its format string."""
        self._default_format_key = format_string
        self._default_format = None


format_registry = WorkingTreeFormatRegistry()
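# Illustrative sketch (not part of the original source): registering a
# concrete format and selecting the default through the registry above.
# The format class name and format string are placeholders, not guaranteed
# to match the formats actually shipped.
#
#   format_registry.register(WorkingTreeFormat6())
#   format_registry.set_default_key("Bazaar Working Tree Format 6 (bzr 1.14)\n")
#   default_format = format_registry.get_default()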
    def conflicts(self):
        conflicts = ConflictList()
        for conflicted in self._iter_conflicts():
            text = True
            try:
                if file_kind(self.abspath(conflicted)) != "file":
                    text = False
            except errors.NoSuchFile:
                text = False
            if text is True:
                for suffix in ('.THIS', '.OTHER'):
                    try:
                        kind = file_kind(self.abspath(conflicted + suffix))
                        if kind != "file":
                            text = False
                    except errors.NoSuchFile:
                        text = False
                    if not text:
                        break
            ctype = {True: 'text conflict', False: 'contents conflict'}[text]
            conflicts.append(Conflict.factory(ctype, path=conflicted,
                             file_id=self.path2id(conflicted)))
        return conflicts
class WorkingTree3(WorkingTree):
    """This is the Format 3 working tree.

    This differs from the base WorkingTree by:
     - having its own file lock
     - having its own last-revision property.

    This is new in bzr 0.8
    """
    def last_revision(self):
        """See WorkingTree.last_revision."""
        try:
            return self._control_files.get_utf8('last-revision').read()
        except errors.NoSuchFile:
            return None

    def _change_last_revision(self, revision_id):
        """See WorkingTree._change_last_revision."""
        if revision_id is None or revision_id == NULL_REVISION:
            try:
                self._control_files._transport.delete('last-revision')
            except errors.NoSuchFile:
                pass
            return False
        else:
            try:
                self.branch.revision_history().index(revision_id)
            except ValueError:
                raise errors.NoSuchRevision(self.branch, revision_id)
            self._control_files.put_utf8('last-revision', revision_id)
            return True
    def set_conflicts(self, conflicts):
        self._put_rio('conflicts', conflicts.to_stanzas(),
                      CONFLICT_HEADER_1)

    def conflicts(self):
        try:
            confile = self._control_files.get('conflicts')
        except errors.NoSuchFile:
            return ConflictList()
        try:
            if confile.next() != CONFLICT_HEADER_1 + '\n':
                raise ConflictFormatError()
        except StopIteration:
            raise ConflictFormatError()
        return ConflictList.from_stanzas(RioReader(confile))
def get_conflicted_stem(path):
    for suffix in CONFLICT_SUFFIXES:
        if path.endswith(suffix):
            return path[:-len(suffix)]
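# Illustrative sketch (not part of the original source): get_conflicted_stem()
# recovers the base file name from a conflict artefact, e.g.
#
#   get_conflicted_stem('README.THIS')   -> 'README'
#   get_conflicted_stem('README.OTHER')  -> 'README'
#   get_conflicted_stem('README')        -> None (no conflict suffix)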
@deprecated_function(zero_eight)
def is_control_file(filename):
    """See WorkingTree.is_control_filename(filename)."""
    ## FIXME: better check
    filename = normpath(filename)
    while filename != '':
        head, tail = os.path.split(filename)
        ## mutter('check %r for control file' % ((head, tail),))
        if tail == '.bzr':
            return True
        if filename == head:
            break
        filename = head
    return False
class WorkingTreeFormat(ControlComponentFormat):
    """An encapsulation of the initialization and open routines for a format.

    Formats provide three things: