17
17
"""Tree classes, representing directory at point in time.
20
from .lazy_import import lazy_import
21
lazy_import(globals(), """
22
from breezy.i18n import gettext
21
from collections import deque
25
conflicts as _mod_conflicts,
29
30
revision as _mod_revision,
32
from .inter import InterObject
35
class FileTimestampUnavailable(errors.BzrError):
37
_fmt = "The filestamp for %(path)s is not available."
41
def __init__(self, path):
45
class MissingNestedTree(errors.BzrError):
47
_fmt = "The nested tree for %(path)s can not be resolved."""
49
def __init__(self, path):
53
class TreeEntry(object):
54
"""An entry that implements the minimum interface used by commands.
59
def __eq__(self, other):
60
# yes, this is ugly, TODO: best practice __eq__ style.
61
return (isinstance(other, TreeEntry)
62
and other.__class__ == self.__class__)
66
def kind_character(self):
69
def is_unmodified(self, other):
70
"""Does this entry reference the same entry?
72
This is mostly the same as __eq__, but returns False
73
for entries without enough information (i.e. revision is None)
78
class TreeDirectory(TreeEntry):
79
"""See TreeEntry. This is a directory in a working tree."""
85
def kind_character(self):
89
class TreeFile(TreeEntry):
90
"""See TreeEntry. This is a regular file in a working tree."""
96
def kind_character(self):
100
class TreeLink(TreeEntry):
101
"""See TreeEntry. This is a symlink in a working tree."""
107
def kind_character(self):
111
class TreeReference(TreeEntry):
112
"""See TreeEntry. This is a reference to a nested tree in a working tree."""
116
kind = 'tree-reference'
118
def kind_character(self):
122
class TreeChange(object):
123
"""Describes the changes between the same item in two different trees."""
125
__slots__ = ['file_id', 'path', 'changed_content', 'versioned', 'parent_id',
126
'name', 'kind', 'executable', 'copied']
128
def __init__(self, file_id, path, changed_content, versioned, parent_id,
129
name, kind, executable, copied=False):
130
self.file_id = file_id
132
self.changed_content = changed_content
133
self.versioned = versioned
134
self.parent_id = parent_id
137
self.executable = executable
141
return "%s%r" % (self.__class__.__name__, self._as_tuple())
144
return len(self.__slots__)
147
return (self.file_id, self.path, self.changed_content, self.versioned,
148
self.parent_id, self.name, self.kind, self.executable, self.copied)
150
def __eq__(self, other):
151
if isinstance(other, TreeChange):
152
return self._as_tuple() == other._as_tuple()
153
if isinstance(other, tuple):
154
return self._as_tuple() == other
157
def __lt__(self, other):
158
return self._as_tuple() < other._as_tuple()
160
def meta_modified(self):
161
if self.versioned == (True, True):
162
return (self.executable[0] != self.executable[1])
165
def is_reparented(self):
166
return self.parent_id[0] != self.parent_id[1]
168
def discard_new(self):
169
return self.__class__(
170
self.file_id, (self.path[0], None), self.changed_content,
171
(self.versioned[0], None), (self.parent_id[0], None),
172
(self.name[0], None), (self.kind[0], None),
173
(self.executable[0], None),
33
from bzrlib.decorators import needs_read_lock
34
from bzrlib.errors import BzrError, NoSuchId
35
from bzrlib import errors
36
from bzrlib.inventory import InventoryFile
37
from bzrlib.inter import InterObject
38
from bzrlib.osutils import fingerprint_file
39
import bzrlib.revision
40
from bzrlib.symbol_versioning import deprecated_function, deprecated_in
41
from bzrlib.trace import note
177
44
class Tree(object):
427
254
raise NotImplementedError(self._comparison_data)
429
def get_file(self, path):
430
"""Return a file object for the file path in the tree.
256
def _file_size(self, entry, stat_value):
257
raise NotImplementedError(self._file_size)
259
def _get_inventory(self):
260
return self._inventory
262
def get_file(self, file_id, path=None):
263
"""Return a file object for the file file_id in the tree.
265
If both file_id and path are defined, it is implementation defined as
266
to which one is used.
432
268
raise NotImplementedError(self.get_file)
434
def get_file_with_stat(self, path):
435
"""Get a file handle and stat object for path.
270
def get_file_with_stat(self, file_id, path=None):
271
"""Get a file handle and stat object for file_id.
437
273
The default implementation returns (self.get_file, None) for backwards
440
:param path: The path of the file.
276
:param file_id: The file id to read.
277
:param path: The path of the file, if it is known.
441
278
:return: A tuple (file_handle, stat_value_or_None). If the tree has
442
279
no stat facility, or need for a stat cache feedback during commit,
443
280
it may return None for the second element of the tuple.
445
return (self.get_file(path), None)
282
return (self.get_file(file_id, path), None)
447
def get_file_text(self, path):
284
def get_file_text(self, file_id, path=None):
448
285
"""Return the byte content of a file.
287
:param file_id: The file_id of the file.
450
288
:param path: The path of the file.
452
:returns: A single byte string for the whole file.
289
If both file_id and path are supplied, an implementation may use
454
with self.get_file(path) as my_file:
292
my_file = self.get_file(file_id, path)
455
294
return my_file.read()
457
def get_file_lines(self, path):
298
def get_file_lines(self, file_id, path=None):
458
299
"""Return the content of a file, as lines.
301
:param file_id: The file_id of the file.
460
302
:param path: The path of the file.
303
If both file_id and path are supplied, an implementation may use
462
return osutils.split_lines(self.get_file_text(path))
464
def get_file_verifier(self, path, stat_value=None):
465
"""Return a verifier for a file.
467
The default implementation returns a sha1.
306
return osutils.split_lines(self.get_file_text(file_id, path))
308
def get_file_mtime(self, file_id, path=None):
309
"""Return the modification time for a file.
311
:param file_id: The handle for this file.
469
312
:param path: The path that this file can be found at.
470
313
These must point to the same object.
471
:param stat_value: Optional stat value for the object
472
:return: Tuple with verifier name and verifier data
474
return ("SHA1", self.get_file_sha1(path, stat_value=stat_value))
476
def get_file_sha1(self, path, stat_value=None):
477
"""Return the SHA1 file for a file.
479
:note: callers should use get_file_verifier instead
480
where possible, as the underlying repository implementation may
481
have quicker access to a non-sha1 verifier.
483
:param path: The path that this file can be found at.
484
:param stat_value: Optional stat value for the object
486
raise NotImplementedError(self.get_file_sha1)
488
def get_file_mtime(self, path):
489
"""Return the modification time for a file.
491
:param path: The path that this file can be found at.
493
315
raise NotImplementedError(self.get_file_mtime)
495
def get_file_size(self, path):
317
def get_file_size(self, file_id):
496
318
"""Return the size of a file in bytes.
498
320
This applies only to regular files. If invoked on directories or
499
321
symlinks, it will return None.
322
:param file_id: The file-id of the file
501
324
raise NotImplementedError(self.get_file_size)
503
def is_executable(self, path):
504
"""Check if a file is executable.
506
:param path: The path that this file can be found at.
508
raise NotImplementedError(self.is_executable)
326
def get_file_by_path(self, path):
327
return self.get_file(self._inventory.path2id(path), path)
510
329
def iter_files_bytes(self, desired_files):
511
330
"""Iterate through file contents.
523
342
this implementation, it is a tuple containing a single bytestring with
524
343
the complete text of the file.
526
:param desired_files: a list of (path, identifier) pairs
345
:param desired_files: a list of (file_id, identifier) pairs
528
for path, identifier in desired_files:
347
for file_id, identifier in desired_files:
529
348
# We wrap the string in a tuple so that we can return an iterable
530
349
# of bytestrings. (Technically, a bytestring is also an iterable
531
350
# of bytestrings, but iterating through each character is not
533
cur_file = (self.get_file_text(path),)
352
cur_file = (self.get_file_text(file_id),)
534
353
yield identifier, cur_file
536
def get_symlink_target(self, path):
537
"""Get the target for a given path.
355
def get_symlink_target(self, file_id):
356
"""Get the target for a given file_id.
539
It is assumed that the caller already knows that path is referencing
358
It is assumed that the caller already knows that file_id is referencing
541
:param path: The path of the file.
360
:param file_id: Handle for the symlink entry.
542
361
:return: The path the symlink points to.
544
363
raise NotImplementedError(self.get_symlink_target)
546
def annotate_iter(self, path,
365
def get_canonical_inventory_paths(self, paths):
366
"""Like get_canonical_inventory_path() but works on multiple items.
368
:param paths: A sequence of paths relative to the root of the tree.
369
:return: A list of paths, with each item the corresponding input path
370
adjusted to account for existing elements that match case
373
return list(self._yield_canonical_inventory_paths(paths))
375
def get_canonical_inventory_path(self, path):
376
"""Returns the first inventory item that case-insensitively matches path.
378
If a path matches exactly, it is returned. If no path matches exactly
379
but more than one path matches case-insensitively, it is implementation
380
defined which is returned.
382
If no path matches case-insensitively, the input path is returned, but
383
with as many path entries that do exist changed to their canonical
386
If you need to resolve many names from the same tree, you should
387
use get_canonical_inventory_paths() to avoid O(N) behaviour.
389
:param path: A paths relative to the root of the tree.
390
:return: The input path adjusted to account for existing elements
391
that match case insensitively.
393
return self._yield_canonical_inventory_paths([path]).next()
395
def _yield_canonical_inventory_paths(self, paths):
397
# First, if the path as specified exists exactly, just use it.
398
if self.path2id(path) is not None:
402
cur_id = self.get_root_id()
404
bit_iter = iter(path.split("/"))
407
for child in self.iter_children(cur_id):
409
child_base = os.path.basename(self.id2path(child))
410
if child_base.lower() == lelt:
412
cur_path = osutils.pathjoin(cur_path, child_base)
415
# before a change is committed we can see this error...
418
# got to the end of this directory and no entries matched.
419
# Return what matched so far, plus the rest as specified.
420
cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
425
def get_root_id(self):
426
"""Return the file_id for the root of this tree."""
427
raise NotImplementedError(self.get_root_id)
429
def annotate_iter(self, file_id,
547
430
default_revision=_mod_revision.CURRENT_REVISION):
548
431
"""Return an iterator of revision_id, line tuples.
550
433
For working trees (and mutable trees in general), the special
551
434
revision_id 'current:' will be used for lines that are new in this
552
435
tree, e.g. uncommitted changes.
553
:param path: The file to produce an annotated version from
436
:param file_id: The file to produce an annotated version from
554
437
:param default_revision: For lines that don't match a basis, mark them
555
438
with this revision id. Not all implementations will make use of
558
441
raise NotImplementedError(self.annotate_iter)
443
def _get_plan_merge_data(self, file_id, other, base):
444
from bzrlib import versionedfile
445
vf = versionedfile._PlanMergeVersionedFile(file_id)
446
last_revision_a = self._get_file_revision(file_id, vf, 'this:')
447
last_revision_b = other._get_file_revision(file_id, vf, 'other:')
449
last_revision_base = None
451
last_revision_base = base._get_file_revision(file_id, vf, 'base:')
452
return vf, last_revision_a, last_revision_b, last_revision_base
454
def plan_file_merge(self, file_id, other, base=None):
455
"""Generate a merge plan based on annotations.
457
If the file contains uncommitted changes in this tree, they will be
458
attributed to the 'current:' pseudo-revision. If the file contains
459
uncommitted changes in the other tree, they will be assigned to the
460
'other:' pseudo-revision.
462
data = self._get_plan_merge_data(file_id, other, base)
463
vf, last_revision_a, last_revision_b, last_revision_base = data
464
return vf.plan_merge(last_revision_a, last_revision_b,
467
def plan_file_lca_merge(self, file_id, other, base=None):
468
"""Generate a merge plan based lca-newness.
470
If the file contains uncommitted changes in this tree, they will be
471
attributed to the 'current:' pseudo-revision. If the file contains
472
uncommitted changes in the other tree, they will be assigned to the
473
'other:' pseudo-revision.
475
data = self._get_plan_merge_data(file_id, other, base)
476
vf, last_revision_a, last_revision_b, last_revision_base = data
477
return vf.plan_lca_merge(last_revision_a, last_revision_b,
480
def _iter_parent_trees(self):
481
"""Iterate through parent trees, defaulting to Tree.revision_tree."""
482
for revision_id in self.get_parent_ids():
484
yield self.revision_tree(revision_id)
485
except errors.NoSuchRevisionInTree:
486
yield self.repository.revision_tree(revision_id)
489
def _file_revision(revision_tree, file_id):
490
"""Determine the revision associated with a file in a given tree."""
491
revision_tree.lock_read()
493
return revision_tree.inventory[file_id].revision
495
revision_tree.unlock()
497
def _get_file_revision(self, file_id, vf, tree_revision):
498
"""Ensure that file_id, tree_revision is in vf to plan the merge."""
500
if getattr(self, '_repository', None) is None:
501
last_revision = tree_revision
502
parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
503
self._iter_parent_trees()]
504
vf.add_lines((file_id, last_revision), parent_keys,
505
self.get_file(file_id).readlines())
506
repo = self.branch.repository
509
last_revision = self._file_revision(self, file_id)
510
base_vf = self._repository.texts
511
if base_vf not in vf.fallback_versionedfiles:
512
vf.fallback_versionedfiles.append(base_vf)
515
inventory = property(_get_inventory,
516
doc="Inventory of this Tree")
518
def _check_retrieved(self, ie, f):
521
fp = fingerprint_file(f)
524
if ie.text_size is not None:
525
if ie.text_size != fp['size']:
526
raise BzrError("mismatched size for file %r in %r" % (ie.file_id, self._store),
527
["inventory expects %d bytes" % ie.text_size,
528
"file is actually %d bytes" % fp['size'],
529
"store is probably damaged/corrupt"])
531
if ie.text_sha1 != fp['sha1']:
532
raise BzrError("wrong SHA-1 for file %r in %r" % (ie.file_id, self._store),
533
["inventory expects %s" % ie.text_sha1,
534
"file is actually %s" % fp['sha1'],
535
"store is probably damaged/corrupt"])
560
538
def path2id(self, path):
561
539
"""Return the id for path in this tree."""
562
raise NotImplementedError(self.path2id)
564
def is_versioned(self, path):
565
"""Check whether path is versioned.
567
:param path: Path to check
570
return self.path2id(path) is not None
572
def find_related_paths_across_trees(self, paths, trees=[],
573
require_versioned=True):
574
"""Find related paths in tree corresponding to specified filenames in any
577
All matches in all trees will be used, and all children of matched
578
directories will be used.
580
:param paths: The filenames to find related paths for (if None, returns
582
:param trees: The trees to find file_ids within
583
:param require_versioned: if true, all specified filenames must occur in
585
:return: a set of paths for the specified filenames and their children
588
raise NotImplementedError(self.find_related_paths_across_trees)
540
return self._inventory.path2id(path)
542
def paths2ids(self, paths, trees=[], require_versioned=True):
543
"""Return all the ids that can be reached by walking from paths.
545
Each path is looked up in this tree and any extras provided in
546
trees, and this is repeated recursively: the children in an extra tree
547
of a directory that has been renamed under a provided path in this tree
548
are all returned, even if none exist under a provided path in this
549
tree, and vice versa.
551
:param paths: An iterable of paths to start converting to ids from.
552
Alternatively, if paths is None, no ids should be calculated and None
553
will be returned. This is offered to make calling the api unconditional
554
for code that *might* take a list of files.
555
:param trees: Additional trees to consider.
556
:param require_versioned: If False, do not raise NotVersionedError if
557
an element of paths is not versioned in this tree and all of trees.
559
return find_ids_across_trees(paths, [self] + list(trees), require_versioned)
561
def iter_children(self, file_id):
562
entry = self.iter_entries_by_dir([file_id]).next()[1]
563
for child in getattr(entry, 'children', {}).itervalues():
590
566
def lock_read(self):
591
"""Lock this tree for multiple read only operations.
593
:return: A breezy.lock.LogicalLockResult.
595
return lock.LogicalLockResult(self.unlock)
597
569
def revision_tree(self, revision_id):
598
570
"""Obtain a revision tree for the revision revision_id.
723
697
for path in path_names:
724
698
yield searcher.get_items(path)
726
701
def _get_rules_searcher(self, default_searcher):
727
702
"""Get the RulesSearcher for this tree given the default one."""
728
703
searcher = default_searcher
731
def archive(self, format, name, root='', subdir=None,
733
"""Create an archive of this tree.
735
:param format: Format name (e.g. 'tar')
736
:param name: target file name
737
:param root: Root directory name (or None)
738
:param subdir: Subdirectory to export (or None)
739
:return: Iterator over archive chunks
741
from .archive import create_archive
742
with self.lock_read():
743
return create_archive(format, self, name, root,
744
subdir, force_mtime=force_mtime)
747
def versionable_kind(cls, kind):
748
"""Check if this tree support versioning a specific file kind."""
749
return (kind in ('file', 'directory', 'symlink', 'tree-reference'))
751
def preview_transform(self, pb=None):
752
"""Obtain a transform object."""
753
raise NotImplementedError(self.preview_transform)
707
######################################################################
710
# TODO: Merge these two functions into a single one that can operate
711
# on either a whole tree or a set of files.
713
# TODO: Return the diff in order by filename, not by category or in
714
# random order. Can probably be done by lock-stepping through the
715
# filenames from both trees.
718
def file_status(filename, old_tree, new_tree):
719
"""Return single-letter status, old and new names for a file.
721
The complexity here is in deciding how to represent renames;
722
many complex cases are possible.
724
old_inv = old_tree.inventory
725
new_inv = new_tree.inventory
726
new_id = new_inv.path2id(filename)
727
old_id = old_inv.path2id(filename)
729
if not new_id and not old_id:
730
# easy: doesn't exist in either; not versioned at all
731
if new_tree.is_ignored(filename):
732
return 'I', None, None
734
return '?', None, None
736
# There is now a file of this name, great.
739
# There is no longer a file of this name, but we can describe
740
# what happened to the file that used to have
741
# this name. There are two possibilities: either it was
742
# deleted entirely, or renamed.
743
if new_inv.has_id(old_id):
744
return 'X', old_inv.id2path(old_id), new_inv.id2path(old_id)
746
return 'D', old_inv.id2path(old_id), None
748
# if the file_id is new in this revision, it is added
749
if new_id and not old_inv.has_id(new_id):
752
# if there used to be a file of this name, but that ID has now
753
# disappeared, it is deleted
754
if old_id and not new_inv.has_id(old_id):
760
@deprecated_function(deprecated_in((1, 9, 0)))
761
def find_renames(old_inv, new_inv):
762
for file_id in old_inv:
763
if file_id not in new_inv:
765
old_name = old_inv.id2path(file_id)
766
new_name = new_inv.id2path(file_id)
767
if old_name != new_name:
768
yield (old_name, new_name)
771
def find_ids_across_trees(filenames, trees, require_versioned=True):
772
"""Find the ids corresponding to specified filenames.
774
All matches in all trees will be used, and all children of matched
775
directories will be used.
777
:param filenames: The filenames to find file_ids for (if None, returns
779
:param trees: The trees to find file_ids within
780
:param require_versioned: if true, all specified filenames must occur in
782
:return: a set of file ids for the specified filenames and their children.
786
specified_path_ids = _find_ids_across_trees(filenames, trees,
788
return _find_children_across_trees(specified_path_ids, trees)
791
def _find_ids_across_trees(filenames, trees, require_versioned):
792
"""Find the ids corresponding to specified filenames.
794
All matches in all trees will be used, but subdirectories are not scanned.
796
:param filenames: The filenames to find file_ids for
797
:param trees: The trees to find file_ids within
798
:param require_versioned: if true, all specified filenames must occur in
800
:return: a set of file ids for the specified filenames
803
interesting_ids = set()
804
for tree_path in filenames:
807
file_id = tree.path2id(tree_path)
808
if file_id is not None:
809
interesting_ids.add(file_id)
812
not_versioned.append(tree_path)
813
if len(not_versioned) > 0 and require_versioned:
814
raise errors.PathsNotVersionedError(not_versioned)
815
return interesting_ids
818
def _find_children_across_trees(specified_ids, trees):
819
"""Return a set including specified ids and their children.
821
All matches in all trees will be used.
823
:param trees: The trees to find file_ids within
824
:return: a set containing all specified ids and their children
826
interesting_ids = set(specified_ids)
827
pending = interesting_ids
828
# now handle children of interesting ids
829
# we loop so that we handle all children of each id in both trees
830
while len(pending) > 0:
832
for file_id in pending:
834
if not tree.has_or_had_id(file_id):
836
for child_id in tree.iter_children(file_id):
837
if child_id not in interesting_ids:
838
new_pending.add(child_id)
839
interesting_ids.update(new_pending)
840
pending = new_pending
841
return interesting_ids
756
844
class InterTree(InterObject):
843
1012
output. An unversioned file is defined as one with (False, False)
844
1013
for the versioned pair.
846
raise NotImplementedError(self.iter_changes)
848
def file_content_matches(
849
self, source_path, target_path,
850
source_stat=None, target_stat=None):
851
"""Check if two files are the same in the source and target trees.
853
This only checks that the contents of the files are the same,
854
it does not touch anything else.
856
:param source_path: Path of the file in the source tree
857
:param target_path: Path of the file in the target tree
858
:param source_stat: Optional stat value of the file in the source tree
859
:param target_stat: Optional stat value of the file in the target tree
860
:return: Boolean indicating whether the files have the same contents
862
with self.lock_read():
863
source_verifier_kind, source_verifier_data = (
864
self.source.get_file_verifier(source_path, source_stat))
865
target_verifier_kind, target_verifier_data = (
866
self.target.get_file_verifier(
867
target_path, target_stat))
868
if source_verifier_kind == target_verifier_kind:
869
return (source_verifier_data == target_verifier_data)
870
# Fall back to SHA1 for now
871
if source_verifier_kind != "SHA1":
872
source_sha1 = self.source.get_file_sha1(
873
source_path, source_stat)
875
source_sha1 = source_verifier_data
876
if target_verifier_kind != "SHA1":
877
target_sha1 = self.target.get_file_sha1(
878
target_path, target_stat)
880
target_sha1 = target_verifier_data
881
return (source_sha1 == target_sha1)
883
def find_target_path(self, path, recurse='none'):
884
"""Find target tree path.
886
:param path: Path to search for (exists in source)
887
:return: path in target, or None if there is no equivalent path.
888
:raise NoSuchFile: If the path doesn't exist in source
890
raise NotImplementedError(self.find_target_path)
892
def find_source_path(self, path, recurse='none'):
893
"""Find the source tree path.
895
:param path: Path to search for (exists in target)
896
:return: path in source, or None if there is no equivalent path.
897
:raise NoSuchFile: if the path doesn't exist in target
899
raise NotImplementedError(self.find_source_path)
901
def find_target_paths(self, paths, recurse='none'):
902
"""Find target tree paths.
904
:param paths: Iterable over paths in target to search for
905
:return: Dictionary mapping from source paths to paths in target , or
906
None if there is no equivalent path.
910
ret[path] = self.find_target_path(path, recurse=recurse)
913
def find_source_paths(self, paths, recurse='none'):
914
"""Find source tree paths.
916
:param paths: Iterable over paths in target to search for
917
:return: Dictionary mapping from target paths to paths in source, or
918
None if there is no equivalent path.
922
ret[path] = self.find_source_path(path, recurse=recurse)
926
def find_previous_paths(from_tree, to_tree, paths, recurse='none'):
927
"""Find previous tree paths.
929
:param from_tree: From tree
930
:param to_tree: To tree
931
:param paths: Iterable over paths in from_tree to search for
932
:return: Dictionary mapping from from_tree paths to paths in to_tree, or
933
None if there is no equivalent path.
935
return InterTree.get(to_tree, from_tree).find_source_paths(paths, recurse=recurse)
938
def find_previous_path(from_tree, to_tree, path, recurse='none'):
939
"""Find previous tree path.
941
:param from_tree: From tree
942
:param to_tree: To tree
943
:param path: Path to search for (exists in from_tree)
944
:return: path in to_tree, or None if there is no equivalent path.
945
:raise NoSuchFile: If the path doesn't exist in from_tree
947
return InterTree.get(to_tree, from_tree).find_source_path(
948
path, recurse=recurse)
951
def get_canonical_path(tree, path, normalize):
952
"""Find the canonical path of an item, ignoring case.
954
:param tree: Tree to traverse
955
:param path: Case-insensitive path to look up
956
:param normalize: Function to normalize a filename for comparison
957
:return: The canonical path
961
bit_iter = iter(path.split("/"))
963
lelt = normalize(elt)
1015
lookup_trees = [self.source]
1017
lookup_trees.extend(extra_trees)
1018
# The ids of items we need to examine to insure delta consistency.
1019
precise_file_ids = set()
1020
changed_file_ids = []
1021
if specific_files == []:
1022
specific_file_ids = []
1024
specific_file_ids = self.target.paths2ids(specific_files,
1025
lookup_trees, require_versioned=require_versioned)
1026
if specific_files is not None:
1027
# reparented or added entries must have their parents included
1028
# so that valid deltas can be created. The seen_parents set
1029
# tracks the parents that we need to have.
1030
# The seen_dirs set tracks directory entries we've yielded.
1031
# After outputting version object in to_entries we set difference
1032
# the two seen sets and start checking parents.
1033
seen_parents = set()
1035
if want_unversioned:
1036
all_unversioned = sorted([(p.split('/'), p) for p in
1037
self.target.extras()
1038
if specific_files is None or
1039
osutils.is_inside_any(specific_files, p)])
1040
all_unversioned = deque(all_unversioned)
1042
all_unversioned = deque()
1044
from_entries_by_dir = list(self.source.iter_entries_by_dir(
1045
specific_file_ids=specific_file_ids))
1046
from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
1047
to_entries_by_dir = list(self.target.iter_entries_by_dir(
1048
specific_file_ids=specific_file_ids))
1049
num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
1051
# the unversioned path lookup only occurs on real trees - where there
1052
# can be extras. So the fake_entry is solely used to look up
1053
# executable it values when execute is not supported.
1054
fake_entry = InventoryFile('unused', 'unused', 'unused')
1055
for target_path, target_entry in to_entries_by_dir:
1056
while (all_unversioned and
1057
all_unversioned[0][0] < target_path.split('/')):
1058
unversioned_path = all_unversioned.popleft()
1059
target_kind, target_executable, target_stat = \
1060
self.target._comparison_data(fake_entry, unversioned_path[1])
1061
yield (None, (None, unversioned_path[1]), True, (False, False),
1063
(None, unversioned_path[0][-1]),
1064
(None, target_kind),
1065
(None, target_executable))
1066
source_path, source_entry = from_data.get(target_entry.file_id,
1068
result, changes = self._changes_from_entries(source_entry,
1069
target_entry, source_path=source_path, target_path=target_path)
1070
to_paths[result[0]] = result[1][1]
1075
pb.update('comparing files', entry_count, num_entries)
1076
if changes or include_unchanged:
1077
if specific_file_ids is not None:
1078
new_parent_id = result[4][1]
1079
precise_file_ids.add(new_parent_id)
1080
changed_file_ids.append(result[0])
1082
# Ensure correct behaviour for reparented/added specific files.
1083
if specific_files is not None:
1084
# Record output dirs
1085
if result[6][1] == 'directory':
1086
seen_dirs.add(result[0])
1087
# Record parents of reparented/added entries.
1088
versioned = result[3]
1090
if not versioned[0] or parents[0] != parents[1]:
1091
seen_parents.add(parents[1])
1092
while all_unversioned:
1093
# yield any trailing unversioned paths
1094
unversioned_path = all_unversioned.popleft()
1095
to_kind, to_executable, to_stat = \
1096
self.target._comparison_data(fake_entry, unversioned_path[1])
1097
yield (None, (None, unversioned_path[1]), True, (False, False),
1099
(None, unversioned_path[0][-1]),
1101
(None, to_executable))
1102
# Yield all remaining source paths
1103
for path, from_entry in from_entries_by_dir:
1104
file_id = from_entry.file_id
1105
if file_id in to_paths:
1108
if file_id not in self.target.all_file_ids():
1109
# common case - paths we have not emitted are not present in
1113
to_path = self.target.id2path(file_id)
1116
pb.update('comparing files', entry_count, num_entries)
1117
versioned = (True, False)
1118
parent = (from_entry.parent_id, None)
1119
name = (from_entry.name, None)
1120
from_kind, from_executable, stat_value = \
1121
self.source._comparison_data(from_entry, path)
1122
kind = (from_kind, None)
1123
executable = (from_executable, None)
1124
changed_content = from_kind is not None
1125
# the parent's path is necessarily known at this point.
1126
changed_file_ids.append(file_id)
1127
yield(file_id, (path, to_path), changed_content, versioned, parent,
1128
name, kind, executable)
1129
changed_file_ids = set(changed_file_ids)
1130
if specific_file_ids is not None:
1131
for result in self._handle_precise_ids(precise_file_ids,
1135
def _get_entry(self, tree, file_id):
1136
"""Get an inventory entry from a tree, with missing entries as None.
1138
If the tree raises NotImplementedError on accessing .inventory, then
1139
this is worked around using iter_entries_by_dir on just the file id
1142
:param tree: The tree to lookup the entry in.
1143
:param file_id: The file_id to lookup.
966
for child in tree.iter_child_entries(cur_path):
1146
inventory = tree.inventory
1147
except NotImplementedError:
1148
# No inventory available.
1150
iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
1151
return iterator.next()[1]
1152
except StopIteration:
1156
return inventory[file_id]
1157
except errors.NoSuchId:
1160
def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
1161
discarded_changes=None):
1162
"""Fill out a partial iter_changes to be consistent.
1164
:param precise_file_ids: The file ids of parents that were seen during
1166
:param changed_file_ids: The file ids of already emitted items.
1167
:param discarded_changes: An optional dict of precalculated
1168
iter_changes items which the partial iter_changes had not output
1170
:return: A generator of iter_changes items to output.
1172
# process parents of things that had changed under the users
1173
# requested paths to prevent incorrect paths or parent ids which
1174
# aren't in the tree.
1175
while precise_file_ids:
1176
precise_file_ids.discard(None)
1177
# Don't emit file_ids twice
1178
precise_file_ids.difference_update(changed_file_ids)
1179
if not precise_file_ids:
1181
# If the there was something at a given output path in source, we
1182
# have to include the entry from source in the delta, or we would
1183
# be putting this entry into a used path.
1185
for parent_id in precise_file_ids:
968
if child.name == elt:
969
# if we found an exact match, we can stop now; if
970
# we found an approximate match we need to keep
971
# searching because there might be an exact match
973
new_path = osutils.pathjoin(cur_path, child.name)
975
elif normalize(child.name) == lelt:
976
new_path = osutils.pathjoin(cur_path, child.name)
1187
paths.append(self.target.id2path(parent_id))
977
1188
except errors.NoSuchId:
978
# before a change is committed we can see this error...
980
except errors.NotADirectory:
985
# got to the end of this directory and no entries matched.
986
# Return what matched so far, plus the rest as specified.
987
cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
1189
# This id has been dragged in from the source by delta
1190
# expansion and isn't present in target at all: we don't
1191
# need to check for path collisions on it.
1194
old_id = self.source.path2id(path)
1195
precise_file_ids.add(old_id)
1196
precise_file_ids.discard(None)
1197
current_ids = precise_file_ids
1198
precise_file_ids = set()
1199
# We have to emit all of precise_file_ids that have been altered.
1200
# We may have to output the children of some of those ids if any
1201
# directories have stopped being directories.
1202
for file_id in current_ids:
1204
if discarded_changes:
1205
result = discarded_changes.get(file_id)
1210
old_entry = self._get_entry(self.source, file_id)
1211
new_entry = self._get_entry(self.target, file_id)
1212
result, changes = self._changes_from_entries(
1213
old_entry, new_entry)
1216
# Get this parents parent to examine.
1217
new_parent_id = result[4][1]
1218
precise_file_ids.add(new_parent_id)
1220
if (result[6][0] == 'directory' and
1221
result[6][1] != 'directory'):
1222
# This stopped being a directory, the old children have
1224
if old_entry is None:
1225
# Reusing a discarded change.
1226
old_entry = self._get_entry(self.source, file_id)
1227
for child in old_entry.children.values():
1228
precise_file_ids.add(child.file_id)
1229
changed_file_ids.add(result[0])
1233
class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    # lookups. Instead any nodes that don't match in all trees could be
    # marked as 'deferred', and then returned in the final cleanup loop.
    # For now, I think it is "nicer" to return things as close to the
    # "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()

    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = next(iterator)
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, str):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, str):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        key1 = MultiWalker._path_to_key(path1)
        key2 = MultiWalker._path_to_key(path2)
        # Python 3 has no cmp(); derive -1/0/1 from the tuple ordering.
        return (key1 > key2) - (key1 < key2)

    @staticmethod
    def _path_to_key(path):
        """Turn a path into a sort key grouping entries by directory."""
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)

    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for _ in range(len(self._other_trees))]

        master_has_more = True
        # Bind hot helpers to locals; this loop touches every node.
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(
                    other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        # This entry sorts before the master entry; stash it
                        # so it can be looked up out of order later.
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.values(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                # Earlier trees have already been fully walked; report them
                # as not containing this entry.
                other_values = [(None, None) for _ in range(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx in range(idx + 1, len(self._others_extra)):
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values