17
17
"""Tree classes, representing directory at point in time.
21
from collections.abc import deque
22
except ImportError: # python < 3.7
23
from collections import deque
25
from .lazy_import import lazy_import
20
from __future__ import absolute_import
24
from brzlib.lazy_import import lazy_import
26
25
lazy_import(globals(), """
29
29
conflicts as _mod_conflicts,
33
36
revision as _mod_revision,
37
from breezy.i18n import gettext
40
from brzlib.i18n import gettext
43
from brzlib.decorators import needs_read_lock
44
from brzlib.inter import InterObject
45
from brzlib.symbol_versioning import (
45
from .inter import InterObject
48
class FileTimestampUnavailable(errors.BzrError):
50
_fmt = "The filestamp for %(path)s is not available."
54
def __init__(self, path):
58
class MissingNestedTree(errors.BzrError):
60
_fmt = "The nested tree for %(path)s can not be resolved."""
62
def __init__(self, path):
66
class TreeEntry(object):
67
"""An entry that implements the minimum interface used by commands.
72
def __eq__(self, other):
73
# yes, this is ugly, TODO: best practice __eq__ style.
74
return (isinstance(other, TreeEntry)
75
and other.__class__ == self.__class__)
79
def kind_character(self):
82
def is_unmodified(self, other):
83
"""Does this entry reference the same entry?
85
This is mostly the same as __eq__, but returns False
86
for entries without enough information (i.e. revision is None)
91
class TreeDirectory(TreeEntry):
92
"""See TreeEntry. This is a directory in a working tree."""
98
def kind_character(self):
102
class TreeFile(TreeEntry):
103
"""See TreeEntry. This is a regular file in a working tree."""
109
def kind_character(self):
113
class TreeLink(TreeEntry):
114
"""See TreeEntry. This is a symlink in a working tree."""
120
def kind_character(self):
124
class TreeReference(TreeEntry):
125
"""See TreeEntry. This is a reference to a nested tree in a working tree."""
129
kind = 'tree-reference'
131
def kind_character(self):
135
class TreeChange(object):
136
"""Describes the changes between the same item in two different trees."""
138
__slots__ = ['file_id', 'path', 'changed_content', 'versioned', 'parent_id',
139
'name', 'kind', 'executable', 'copied']
141
def __init__(self, file_id, path, changed_content, versioned, parent_id,
142
name, kind, executable, copied=False):
143
self.file_id = file_id
145
self.changed_content = changed_content
146
self.versioned = versioned
147
self.parent_id = parent_id
150
self.executable = executable
154
return "%s%r" % (self.__class__.__name__, self._as_tuple())
157
return len(self.__slots__)
160
return (self.file_id, self.path, self.changed_content, self.versioned,
161
self.parent_id, self.name, self.kind, self.executable, self.copied)
163
def __eq__(self, other):
164
if isinstance(other, TreeChange):
165
return self._as_tuple() == other._as_tuple()
166
if isinstance(other, tuple):
167
return self._as_tuple() == other
170
def __lt__(self, other):
171
return self._as_tuple() < other._as_tuple()
173
def meta_modified(self):
174
if self.versioned == (True, True):
175
return (self.executable[0] != self.executable[1])
178
def is_reparented(self):
179
return self.parent_id[0] != self.parent_id[1]
181
def discard_new(self):
182
return self.__class__(
183
self.file_id, (self.path[0], None), self.changed_content,
184
(self.versioned[0], None), (self.parent_id[0], None),
185
(self.name[0], None), (self.kind[0], None),
186
(self.executable[0], None),
190
51
class Tree(object):
439
269
raise NotImplementedError(self._comparison_data)
441
def get_file(self, path):
442
"""Return a file object for the file path in the tree.
271
def _file_size(self, entry, stat_value):
272
raise NotImplementedError(self._file_size)
274
def get_file(self, file_id, path=None):
275
"""Return a file object for the file file_id in the tree.
277
If both file_id and path are defined, it is implementation defined as
278
to which one is used.
444
280
raise NotImplementedError(self.get_file)
446
def get_file_with_stat(self, path):
447
"""Get a file handle and stat object for path.
282
def get_file_with_stat(self, file_id, path=None):
283
"""Get a file handle and stat object for file_id.
449
285
The default implementation returns (self.get_file, None) for backwards
452
:param path: The path of the file.
288
:param file_id: The file id to read.
289
:param path: The path of the file, if it is known.
453
290
:return: A tuple (file_handle, stat_value_or_None). If the tree has
454
291
no stat facility, or need for a stat cache feedback during commit,
455
292
it may return None for the second element of the tuple.
457
return (self.get_file(path), None)
294
return (self.get_file(file_id, path), None)
459
def get_file_text(self, path):
296
def get_file_text(self, file_id, path=None):
460
297
"""Return the byte content of a file.
299
:param file_id: The file_id of the file.
462
300
:param path: The path of the file.
302
If both file_id and path are supplied, an implementation may use
464
305
:returns: A single byte string for the whole file.
466
with self.get_file(path) as my_file:
307
my_file = self.get_file(file_id, path)
467
309
return my_file.read()
469
def get_file_lines(self, path):
313
def get_file_lines(self, file_id, path=None):
470
314
"""Return the content of a file, as lines.
316
:param file_id: The file_id of the file.
472
317
:param path: The path of the file.
319
If both file_id and path are supplied, an implementation may use
474
return osutils.split_lines(self.get_file_text(path))
322
return osutils.split_lines(self.get_file_text(file_id, path))
476
def get_file_verifier(self, path, stat_value=None):
324
def get_file_verifier(self, file_id, path=None, stat_value=None):
477
325
"""Return a verifier for a file.
479
327
The default implementation returns a sha1.
329
:param file_id: The handle for this file.
481
330
:param path: The path that this file can be found at.
482
331
These must point to the same object.
483
332
:param stat_value: Optional stat value for the object
484
333
:return: Tuple with verifier name and verifier data
486
return ("SHA1", self.get_file_sha1(path, stat_value=stat_value))
335
return ("SHA1", self.get_file_sha1(file_id, path=path,
336
stat_value=stat_value))
488
def get_file_sha1(self, path, stat_value=None):
338
def get_file_sha1(self, file_id, path=None, stat_value=None):
489
339
"""Return the SHA1 file for a file.
491
341
:note: callers should use get_file_verifier instead
492
342
where possible, as the underlying repository implementation may
493
343
have quicker access to a non-sha1 verifier.
345
:param file_id: The handle for this file.
495
346
:param path: The path that this file can be found at.
347
These must point to the same object.
496
348
:param stat_value: Optional stat value for the object
498
350
raise NotImplementedError(self.get_file_sha1)
500
def get_file_mtime(self, path):
352
def get_file_mtime(self, file_id, path=None):
501
353
"""Return the modification time for a file.
355
:param file_id: The handle for this file.
503
356
:param path: The path that this file can be found at.
357
These must point to the same object.
505
359
raise NotImplementedError(self.get_file_mtime)
507
def get_file_size(self, path):
361
def get_file_size(self, file_id):
508
362
"""Return the size of a file in bytes.
510
364
This applies only to regular files. If invoked on directories or
511
365
symlinks, it will return None.
366
:param file_id: The file-id of the file
513
368
raise NotImplementedError(self.get_file_size)
515
def is_executable(self, path):
370
def is_executable(self, file_id, path=None):
516
371
"""Check if a file is executable.
373
:param file_id: The handle for this file.
518
374
:param path: The path that this file can be found at.
375
These must point to the same object.
520
377
raise NotImplementedError(self.is_executable)
535
392
this implementation, it is a tuple containing a single bytestring with
536
393
the complete text of the file.
538
:param desired_files: a list of (path, identifier) pairs
395
:param desired_files: a list of (file_id, identifier) pairs
540
for path, identifier in desired_files:
397
for file_id, identifier in desired_files:
541
398
# We wrap the string in a tuple so that we can return an iterable
542
399
# of bytestrings. (Technically, a bytestring is also an iterable
543
400
# of bytestrings, but iterating through each character is not
545
cur_file = (self.get_file_text(path),)
402
cur_file = (self.get_file_text(file_id),)
546
403
yield identifier, cur_file
548
def get_symlink_target(self, path):
549
"""Get the target for a given path.
405
def get_symlink_target(self, file_id, path=None):
406
"""Get the target for a given file_id.
551
It is assumed that the caller already knows that path is referencing
408
It is assumed that the caller already knows that file_id is referencing
410
:param file_id: Handle for the symlink entry.
553
411
:param path: The path of the file.
412
If both file_id and path are supplied, an implementation may use
554
414
:return: The path the symlink points to.
556
416
raise NotImplementedError(self.get_symlink_target)
558
def annotate_iter(self, path,
418
def get_root_id(self):
419
"""Return the file_id for the root of this tree."""
420
raise NotImplementedError(self.get_root_id)
422
def annotate_iter(self, file_id,
559
423
default_revision=_mod_revision.CURRENT_REVISION):
560
424
"""Return an iterator of revision_id, line tuples.
562
426
For working trees (and mutable trees in general), the special
563
427
revision_id 'current:' will be used for lines that are new in this
564
428
tree, e.g. uncommitted changes.
565
:param path: The file to produce an annotated version from
429
:param file_id: The file to produce an annotated version from
566
430
:param default_revision: For lines that don't match a basis, mark them
567
431
with this revision id. Not all implementations will make use of
570
434
raise NotImplementedError(self.annotate_iter)
436
def _get_plan_merge_data(self, file_id, other, base):
437
from brzlib import versionedfile
438
vf = versionedfile._PlanMergeVersionedFile(file_id)
439
last_revision_a = self._get_file_revision(file_id, vf, 'this:')
440
last_revision_b = other._get_file_revision(file_id, vf, 'other:')
442
last_revision_base = None
444
last_revision_base = base._get_file_revision(file_id, vf, 'base:')
445
return vf, last_revision_a, last_revision_b, last_revision_base
447
def plan_file_merge(self, file_id, other, base=None):
448
"""Generate a merge plan based on annotations.
450
If the file contains uncommitted changes in this tree, they will be
451
attributed to the 'current:' pseudo-revision. If the file contains
452
uncommitted changes in the other tree, they will be assigned to the
453
'other:' pseudo-revision.
455
data = self._get_plan_merge_data(file_id, other, base)
456
vf, last_revision_a, last_revision_b, last_revision_base = data
457
return vf.plan_merge(last_revision_a, last_revision_b,
460
def plan_file_lca_merge(self, file_id, other, base=None):
461
"""Generate a merge plan based lca-newness.
463
If the file contains uncommitted changes in this tree, they will be
464
attributed to the 'current:' pseudo-revision. If the file contains
465
uncommitted changes in the other tree, they will be assigned to the
466
'other:' pseudo-revision.
468
data = self._get_plan_merge_data(file_id, other, base)
469
vf, last_revision_a, last_revision_b, last_revision_base = data
470
return vf.plan_lca_merge(last_revision_a, last_revision_b,
473
def _iter_parent_trees(self):
474
"""Iterate through parent trees, defaulting to Tree.revision_tree."""
475
for revision_id in self.get_parent_ids():
477
yield self.revision_tree(revision_id)
478
except errors.NoSuchRevisionInTree:
479
yield self.repository.revision_tree(revision_id)
481
def _get_file_revision(self, file_id, vf, tree_revision):
482
"""Ensure that file_id, tree_revision is in vf to plan the merge."""
484
if getattr(self, '_repository', None) is None:
485
last_revision = tree_revision
486
parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
487
self._iter_parent_trees()]
488
vf.add_lines((file_id, last_revision), parent_keys,
489
self.get_file_lines(file_id))
490
repo = self.branch.repository
493
last_revision = self.get_file_revision(file_id)
494
base_vf = self._repository.texts
495
if base_vf not in vf.fallback_versionedfiles:
496
vf.fallback_versionedfiles.append(base_vf)
499
def _check_retrieved(self, ie, f):
502
fp = osutils.fingerprint_file(f)
505
if ie.text_size is not None:
506
if ie.text_size != fp['size']:
507
raise errors.BzrError(
508
"mismatched size for file %r in %r" %
509
(ie.file_id, self._store),
510
["inventory expects %d bytes" % ie.text_size,
511
"file is actually %d bytes" % fp['size'],
512
"store is probably damaged/corrupt"])
514
if ie.text_sha1 != fp['sha1']:
515
raise errors.BzrError("wrong SHA-1 for file %r in %r" %
516
(ie.file_id, self._store),
517
["inventory expects %s" % ie.text_sha1,
518
"file is actually %s" % fp['sha1'],
519
"store is probably damaged/corrupt"])
572
521
def path2id(self, path):
573
522
"""Return the id for path in this tree."""
574
523
raise NotImplementedError(self.path2id)
576
def is_versioned(self, path):
577
"""Check whether path is versioned.
579
:param path: Path to check
582
return self.path2id(path) is not None
584
def find_related_paths_across_trees(self, paths, trees=[],
585
require_versioned=True):
586
"""Find related paths in tree corresponding to specified filenames in any
589
All matches in all trees will be used, and all children of matched
590
directories will be used.
592
:param paths: The filenames to find related paths for (if None, returns
594
:param trees: The trees to find file_ids within
595
:param require_versioned: if true, all specified filenames must occur in
597
:return: a set of paths for the specified filenames and their children
600
raise NotImplementedError(self.find_related_paths_across_trees)
525
def paths2ids(self, paths, trees=[], require_versioned=True):
526
"""Return all the ids that can be reached by walking from paths.
528
Each path is looked up in this tree and any extras provided in
529
trees, and this is repeated recursively: the children in an extra tree
530
of a directory that has been renamed under a provided path in this tree
531
are all returned, even if none exist under a provided path in this
532
tree, and vice versa.
534
:param paths: An iterable of paths to start converting to ids from.
535
Alternatively, if paths is None, no ids should be calculated and None
536
will be returned. This is offered to make calling the api unconditional
537
for code that *might* take a list of files.
538
:param trees: Additional trees to consider.
539
:param require_versioned: If False, do not raise NotVersionedError if
540
an element of paths is not versioned in this tree and all of trees.
542
return find_ids_across_trees(paths, [self] + list(trees), require_versioned)
544
def iter_children(self, file_id):
545
"""Iterate over the file ids of the children of an entry.
547
:param file_id: File id of the entry
548
:return: Iterator over child file ids.
550
raise NotImplementedError(self.iter_children)
602
552
def lock_read(self):
603
553
"""Lock this tree for multiple read only operations.
605
:return: A breezy.lock.LogicalLockResult.
555
:return: A brzlib.lock.LogicalLockResult.
607
return lock.LogicalLockResult(self.unlock)
609
559
def revision_tree(self, revision_id):
610
560
"""Obtain a revision tree for the revision revision_id.
738
688
searcher = default_searcher
741
def archive(self, format, name, root='', subdir=None,
743
"""Create an archive of this tree.
745
:param format: Format name (e.g. 'tar')
746
:param name: target file name
747
:param root: Root directory name (or None)
748
:param subdir: Subdirectory to export (or None)
749
:return: Iterator over archive chunks
751
from .archive import create_archive
752
with self.lock_read():
753
return create_archive(format, self, name, root,
754
subdir, force_mtime=force_mtime)
757
def versionable_kind(cls, kind):
758
"""Check if this tree support versioning a specific file kind."""
759
return (kind in ('file', 'directory', 'symlink', 'tree-reference'))
692
class InventoryTree(Tree):
693
"""A tree that relies on an inventory for its metadata.
695
Trees contain an `Inventory` object, and also know how to retrieve
696
file texts mentioned in the inventory, either from a working
697
directory or from a store.
699
It is possible for trees to contain files that are not described
700
in their inventory or vice versa; for this use `filenames()`.
702
Subclasses should set the _inventory attribute, which is considered
703
private to external API users.
706
def get_canonical_inventory_paths(self, paths):
707
"""Like get_canonical_inventory_path() but works on multiple items.
709
:param paths: A sequence of paths relative to the root of the tree.
710
:return: A list of paths, with each item the corresponding input path
711
adjusted to account for existing elements that match case
714
return list(self._yield_canonical_inventory_paths(paths))
716
def get_canonical_inventory_path(self, path):
717
"""Returns the first inventory item that case-insensitively matches path.
719
If a path matches exactly, it is returned. If no path matches exactly
720
but more than one path matches case-insensitively, it is implementation
721
defined which is returned.
723
If no path matches case-insensitively, the input path is returned, but
724
with as many path entries that do exist changed to their canonical
727
If you need to resolve many names from the same tree, you should
728
use get_canonical_inventory_paths() to avoid O(N) behaviour.
730
:param path: A paths relative to the root of the tree.
731
:return: The input path adjusted to account for existing elements
732
that match case insensitively.
734
return self._yield_canonical_inventory_paths([path]).next()
736
def _yield_canonical_inventory_paths(self, paths):
738
# First, if the path as specified exists exactly, just use it.
739
if self.path2id(path) is not None:
743
cur_id = self.get_root_id()
745
bit_iter = iter(path.split("/"))
749
for child in self.iter_children(cur_id):
751
# XXX: it seem like if the child is known to be in the
752
# tree, we shouldn't need to go from its id back to
753
# its path -- mbp 2010-02-11
755
# XXX: it seems like we could be more efficient
756
# by just directly looking up the original name and
757
# only then searching all children; also by not
758
# chopping paths so much. -- mbp 2010-02-11
759
child_base = os.path.basename(self.id2path(child))
760
if (child_base == elt):
761
# if we found an exact match, we can stop now; if
762
# we found an approximate match we need to keep
763
# searching because there might be an exact match
766
new_path = osutils.pathjoin(cur_path, child_base)
768
elif child_base.lower() == lelt:
770
new_path = osutils.pathjoin(cur_path, child_base)
771
except errors.NoSuchId:
772
# before a change is committed we can see this error...
777
# got to the end of this directory and no entries matched.
778
# Return what matched so far, plus the rest as specified.
779
cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
784
@deprecated_method(deprecated_in((2, 5, 0)))
785
def _get_inventory(self):
786
return self._inventory
788
inventory = property(_get_inventory,
789
doc="Inventory of this Tree")
791
def _get_root_inventory(self):
792
return self._inventory
794
root_inventory = property(_get_root_inventory,
795
doc="Root inventory of this tree")
797
def _unpack_file_id(self, file_id):
798
"""Find the inventory and inventory file id for a tree file id.
800
:param file_id: The tree file id, as bytestring or tuple
801
:return: Inventory and inventory file id
803
if isinstance(file_id, tuple):
804
if len(file_id) != 1:
805
raise ValueError("nested trees not yet supported: %r" % file_id)
807
return self.root_inventory, file_id
810
def path2id(self, path):
811
"""Return the id for path in this tree."""
812
return self._path2inv_file_id(path)[1]
814
def _path2inv_file_id(self, path):
815
"""Lookup a inventory and inventory file id by path.
817
:param path: Path to look up
818
:return: tuple with inventory and inventory file id
820
# FIXME: Support nested trees
821
return self.root_inventory, self.root_inventory.path2id(path)
823
def id2path(self, file_id):
824
"""Return the path for a file id.
828
inventory, file_id = self._unpack_file_id(file_id)
829
return inventory.id2path(file_id)
831
def has_id(self, file_id):
832
inventory, file_id = self._unpack_file_id(file_id)
833
return inventory.has_id(file_id)
835
def has_or_had_id(self, file_id):
836
inventory, file_id = self._unpack_file_id(file_id)
837
return inventory.has_id(file_id)
839
def all_file_ids(self):
841
[entry.file_id for path, entry in self.iter_entries_by_dir()])
843
@deprecated_method(deprecated_in((2, 4, 0)))
845
return iter(self.all_file_ids())
847
def filter_unversioned_files(self, paths):
848
"""Filter out paths that are versioned.
850
:return: set of paths.
852
# NB: we specifically *don't* call self.has_filename, because for
853
# WorkingTrees that can indicate files that exist on disk but that
855
return set((p for p in paths if self.path2id(p) is None))
858
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
859
"""Walk the tree in 'by_dir' order.
861
This will yield each entry in the tree as a (path, entry) tuple.
862
The order that they are yielded is:
864
See Tree.iter_entries_by_dir for details.
866
:param yield_parents: If True, yield the parents from the root leading
867
down to specific_file_ids that have been requested. This has no
868
impact if specific_file_ids is None.
870
if specific_file_ids is None:
871
inventory_file_ids = None
873
inventory_file_ids = []
874
for tree_file_id in specific_file_ids:
875
inventory, inv_file_id = self._unpack_file_id(tree_file_id)
876
if not inventory is self.root_inventory: # for now
877
raise AssertionError("%r != %r" % (
878
inventory, self.root_inventory))
879
inventory_file_ids.append(inv_file_id)
880
# FIXME: Handle nested trees
881
return self.root_inventory.iter_entries_by_dir(
882
specific_file_ids=inventory_file_ids, yield_parents=yield_parents)
885
def iter_child_entries(self, file_id, path=None):
886
inv, inv_file_id = self._unpack_file_id(file_id)
887
return inv[inv_file_id].children.itervalues()
889
@deprecated_method(deprecated_in((2, 5, 0)))
890
def get_file_by_path(self, path):
891
return self.get_file(self.path2id(path), path)
893
def iter_children(self, file_id, path=None):
894
"""See Tree.iter_children."""
895
entry = self.iter_entries_by_dir([file_id]).next()[1]
896
for child in getattr(entry, 'children', {}).itervalues():
900
def find_ids_across_trees(filenames, trees, require_versioned=True):
901
"""Find the ids corresponding to specified filenames.
903
All matches in all trees will be used, and all children of matched
904
directories will be used.
906
:param filenames: The filenames to find file_ids for (if None, returns
908
:param trees: The trees to find file_ids within
909
:param require_versioned: if true, all specified filenames must occur in
911
:return: a set of file ids for the specified filenames and their children.
915
specified_path_ids = _find_ids_across_trees(filenames, trees,
917
return _find_children_across_trees(specified_path_ids, trees)
920
def _find_ids_across_trees(filenames, trees, require_versioned):
921
"""Find the ids corresponding to specified filenames.
923
All matches in all trees will be used, but subdirectories are not scanned.
925
:param filenames: The filenames to find file_ids for
926
:param trees: The trees to find file_ids within
927
:param require_versioned: if true, all specified filenames must occur in
929
:return: a set of file ids for the specified filenames
932
interesting_ids = set()
933
for tree_path in filenames:
936
file_id = tree.path2id(tree_path)
937
if file_id is not None:
938
interesting_ids.add(file_id)
941
not_versioned.append(tree_path)
942
if len(not_versioned) > 0 and require_versioned:
943
raise errors.PathsNotVersionedError(not_versioned)
944
return interesting_ids
947
def _find_children_across_trees(specified_ids, trees):
948
"""Return a set including specified ids and their children.
950
All matches in all trees will be used.
952
:param trees: The trees to find file_ids within
953
:return: a set containing all specified ids and their children
955
interesting_ids = set(specified_ids)
956
pending = interesting_ids
957
# now handle children of interesting ids
958
# we loop so that we handle all children of each id in both trees
959
while len(pending) > 0:
961
for file_id in pending:
963
if not tree.has_or_had_id(file_id):
965
for child_id in tree.iter_children(file_id):
966
if child_id not in interesting_ids:
967
new_pending.add(child_id)
968
interesting_ids.update(new_pending)
969
pending = new_pending
970
return interesting_ids
762
973
class InterTree(InterObject):
948
1169
seen_dirs = set()
949
1170
if want_unversioned:
950
1171
all_unversioned = sorted([(p.split('/'), p) for p in
952
if specific_files is None or
953
osutils.is_inside_any(specific_files, p)])
954
all_unversioned = deque(all_unversioned)
1172
self.target.extras()
1173
if specific_files is None or
1174
osutils.is_inside_any(specific_files, p)])
1175
all_unversioned = collections.deque(all_unversioned)
956
all_unversioned = deque()
1177
all_unversioned = collections.deque()
958
1179
from_entries_by_dir = list(self.source.iter_entries_by_dir(
959
specific_files=source_specific_files))
960
from_data = dict(from_entries_by_dir)
1180
specific_file_ids=specific_file_ids))
1181
from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
961
1182
to_entries_by_dir = list(self.target.iter_entries_by_dir(
962
specific_files=target_specific_files))
963
path_equivs = self.find_source_paths([p for p, e in to_entries_by_dir])
1183
specific_file_ids=specific_file_ids))
964
1184
num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
966
1186
# the unversioned path lookup only occurs on real trees - where there
967
1187
# can be extras. So the fake_entry is solely used to look up
968
1188
# executable it values when execute is not supported.
969
fake_entry = TreeFile()
1189
fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
970
1190
for target_path, target_entry in to_entries_by_dir:
971
1191
while (all_unversioned and
972
all_unversioned[0][0] < target_path.split('/')):
1192
all_unversioned[0][0] < target_path.split('/')):
973
1193
unversioned_path = all_unversioned.popleft()
974
1194
target_kind, target_executable, target_stat = \
975
self.target._comparison_data(
976
fake_entry, unversioned_path[1])
978
None, (None, unversioned_path[1]), True, (False, False),
1195
self.target._comparison_data(fake_entry, unversioned_path[1])
1196
yield (None, (None, unversioned_path[1]), True, (False, False),
980
1198
(None, unversioned_path[0][-1]),
981
1199
(None, target_kind),
982
1200
(None, target_executable))
983
source_path = path_equivs[target_path]
984
if source_path is not None:
985
source_entry = from_data.get(source_path)
988
result, changes = self._changes_from_entries(
989
source_entry, target_entry, source_path=source_path, target_path=target_path)
990
to_paths[result.file_id] = result.path[1]
1201
source_path, source_entry = from_data.get(target_entry.file_id,
1203
result, changes = self._changes_from_entries(source_entry,
1204
target_entry, source_path=source_path, target_path=target_path)
1205
to_paths[result[0]] = result[1][1]
991
1206
entry_count += 1
992
if result.versioned[0]:
993
1208
entry_count += 1
994
1209
if pb is not None:
995
1210
pb.update('comparing files', entry_count, num_entries)
996
1211
if changes or include_unchanged:
997
if specific_files is not None:
998
precise_file_ids.add(result.parent_id[1])
999
changed_file_ids.append(result.file_id)
1212
if specific_file_ids is not None:
1213
new_parent_id = result[4][1]
1214
precise_file_ids.add(new_parent_id)
1215
changed_file_ids.append(result[0])
1001
1217
# Ensure correct behaviour for reparented/added specific files.
1002
1218
if specific_files is not None:
1003
1219
# Record output dirs
1004
if result.kind[1] == 'directory':
1005
seen_dirs.add(result.file_id)
1220
if result[6][1] == 'directory':
1221
seen_dirs.add(result[0])
1006
1222
# Record parents of reparented/added entries.
1007
if not result.versioned[0] or result.is_reparented():
1008
seen_parents.add(result.parent_id[1])
1223
versioned = result[3]
1225
if not versioned[0] or parents[0] != parents[1]:
1226
seen_parents.add(parents[1])
1009
1227
while all_unversioned:
1010
1228
# yield any trailing unversioned paths
1011
1229
unversioned_path = all_unversioned.popleft()
1012
1230
to_kind, to_executable, to_stat = \
1013
1231
self.target._comparison_data(fake_entry, unversioned_path[1])
1015
None, (None, unversioned_path[1]), True, (False, False),
1232
yield (None, (None, unversioned_path[1]), True, (False, False),
1017
1234
(None, unversioned_path[0][-1]),
1018
1235
(None, to_kind),
1110
1338
# Examine file_id
1111
1339
if discarded_changes:
1112
1340
result = discarded_changes.get(file_id)
1116
1344
if result is None:
1118
source_path = self.source.id2path(file_id)
1119
except errors.NoSuchId:
1123
source_entry = self._get_entry(
1124
self.source, source_path)
1126
target_path = self.target.id2path(file_id)
1127
except errors.NoSuchId:
1131
target_entry = self._get_entry(
1132
self.target, target_path)
1345
old_entry = self._get_entry(self.source, file_id)
1346
new_entry = self._get_entry(self.target, file_id)
1133
1347
result, changes = self._changes_from_entries(
1134
source_entry, target_entry, source_path, target_path)
1348
old_entry, new_entry)
1137
1351
# Get this parents parent to examine.
1138
new_parent_id = result.parent_id[1]
1352
new_parent_id = result[4][1]
1139
1353
precise_file_ids.add(new_parent_id)
1141
if (result.kind[0] == 'directory' and
1142
result.kind[1] != 'directory'):
1355
if (result[6][0] == 'directory' and
1356
result[6][1] != 'directory'):
1143
1357
# This stopped being a directory, the old children have
1144
1358
# to be included.
1145
if source_entry is None:
1359
if old_entry is None:
1146
1360
# Reusing a discarded change.
1147
source_entry = self._get_entry(
1148
self.source, result.path[0])
1361
old_entry = self._get_entry(self.source, file_id)
1149
1362
precise_file_ids.update(
1151
for child in self.source.iter_child_entries(result.path[0]))
1152
changed_file_ids.add(result.file_id)
1363
self.source.iter_children(file_id))
1364
changed_file_ids.add(result[0])
1155
def file_content_matches(
1156
self, source_path, target_path,
1157
source_stat=None, target_stat=None):
1368
def file_content_matches(self, source_file_id, target_file_id,
1369
source_path=None, target_path=None, source_stat=None, target_stat=None):
1158
1370
"""Check if two files are the same in the source and target trees.
1160
1372
This only checks that the contents of the files are the same,
1161
1373
it does not touch anything else.
1375
:param source_file_id: File id of the file in the source tree
1376
:param target_file_id: File id of the file in the target tree
1163
1377
:param source_path: Path of the file in the source tree
1164
1378
:param target_path: Path of the file in the target tree
1165
1379
:param source_stat: Optional stat value of the file in the source tree
1166
1380
:param target_stat: Optional stat value of the file in the target tree
1167
1381
:return: Boolean indicating whether the files have the same contents
1169
with self.lock_read():
1170
source_verifier_kind, source_verifier_data = (
1171
self.source.get_file_verifier(source_path, source_stat))
1172
target_verifier_kind, target_verifier_data = (
1173
self.target.get_file_verifier(
1174
target_path, target_stat))
1175
if source_verifier_kind == target_verifier_kind:
1176
return (source_verifier_data == target_verifier_data)
1177
# Fall back to SHA1 for now
1178
if source_verifier_kind != "SHA1":
1179
source_sha1 = self.source.get_file_sha1(
1383
source_verifier_kind, source_verifier_data = self.source.get_file_verifier(
1384
source_file_id, source_path, source_stat)
1385
target_verifier_kind, target_verifier_data = self.target.get_file_verifier(
1386
target_file_id, target_path, target_stat)
1387
if source_verifier_kind == target_verifier_kind:
1388
return (source_verifier_data == target_verifier_data)
1389
# Fall back to SHA1 for now
1390
if source_verifier_kind != "SHA1":
1391
source_sha1 = self.source.get_file_sha1(source_file_id,
1180
1392
source_path, source_stat)
1182
source_sha1 = source_verifier_data
1183
if target_verifier_kind != "SHA1":
1184
target_sha1 = self.target.get_file_sha1(
1394
source_sha1 = source_verifier_data
1395
if target_verifier_kind != "SHA1":
1396
target_sha1 = self.target.get_file_sha1(target_file_id,
1185
1397
target_path, target_stat)
1187
target_sha1 = target_verifier_data
1188
return (source_sha1 == target_sha1)
1190
def find_target_path(self, path, recurse='none'):
1191
"""Find target tree path.
1193
:param path: Path to search for (exists in source)
1194
:return: path in target, or None if there is no equivalent path.
1195
:raise NoSuchFile: If the path doesn't exist in source
1197
file_id = self.source.path2id(path)
1199
raise errors.NoSuchFile(path)
1201
return self.target.id2path(file_id, recurse=recurse)
1202
except errors.NoSuchId:
1205
def find_source_path(self, path, recurse='none'):
1206
"""Find the source tree path.
1208
:param path: Path to search for (exists in target)
1209
:return: path in source, or None if there is no equivalent path.
1210
:raise NoSuchFile: if the path doesn't exist in target
1212
file_id = self.target.path2id(path)
1214
raise errors.NoSuchFile(path)
1216
return self.source.id2path(file_id, recurse=recurse)
1217
except errors.NoSuchId:
1220
def find_target_paths(self, paths, recurse='none'):
1221
"""Find target tree paths.
1223
:param paths: Iterable over paths in target to search for
1224
:return: Dictionary mapping from source paths to paths in target , or
1225
None if there is no equivalent path.
1229
ret[path] = self.find_target_path(path, recurse=recurse)
1232
def find_source_paths(self, paths, recurse='none'):
1233
"""Find source tree paths.
1235
:param paths: Iterable over paths in target to search for
1236
:return: Dictionary mapping from target paths to paths in source, or
1237
None if there is no equivalent path.
1241
ret[path] = self.find_source_path(path, recurse=recurse)
1399
target_sha1 = target_verifier_data
1400
return (source_sha1 == target_sha1)
1245
1402
# Register InterTree itself as an optimiser, so the generic implementation is
# used when no more specific InterTree subclass matches a pair of trees.
InterTree.register_optimiser(InterTree)
1248
def find_previous_paths(from_tree, to_tree, paths, recurse='none'):
    """Find previous tree paths.

    :param from_tree: From tree
    :param to_tree: To tree
    :param paths: Iterable over paths in from_tree to search for
    :return: Dictionary mapping from from_tree paths to paths in to_tree, or
        None if there is no equivalent path.
    """
    # A "previous" lookup is a source-path lookup with the trees swapped:
    # to_tree plays the source role, from_tree the target role.
    return InterTree.get(to_tree, from_tree).find_source_paths(
        paths, recurse=recurse)
def find_previous_path(from_tree, to_tree, path, recurse='none'):
    """Find previous tree path.

    :param from_tree: From tree
    :param to_tree: To tree
    :param path: Path to search for (exists in from_tree)
    :return: path in to_tree, or None if there is no equivalent path.
    :raise NoSuchFile: If the path doesn't exist in from_tree
    """
    # Same trick as find_previous_paths: swap the trees and do a
    # source-path lookup.
    return InterTree.get(to_tree, from_tree).find_source_path(
        path, recurse=recurse)
def get_canonical_path(tree, path, normalize):
    """Find the canonical path of an item, ignoring case.

    :param tree: Tree to traverse
    :param path: Case-insensitive path to look up
    :param normalize: Function to normalize a filename for comparison
    :return: The canonical path
    """
    # Walk the path one element at a time, replacing each element with the
    # exactly- or approximately-matching name actually present in the tree.
    cur_path = ''
    bit_iter = iter(path.split("/"))
    for elt in bit_iter:
        lelt = normalize(elt)
        new_path = None
        try:
            for child in tree.iter_child_entries(cur_path):
                try:
                    if child.name == elt:
                        # if we found an exact match, we can stop now; if
                        # we found an approximate match we need to keep
                        # searching because there might be an exact match
                        # later.
                        new_path = osutils.pathjoin(cur_path, child.name)
                        break
                    elif normalize(child.name) == lelt:
                        new_path = osutils.pathjoin(cur_path, child.name)
                except errors.NoSuchId:
                    # before a change is committed we can see this error...
                    continue
        except errors.NotADirectory:
            pass
        if new_path:
            cur_path = new_path
        else:
            # got to the end of this directory and no entries matched.
            # Return what matched so far, plus the rest as specified.
            cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
            break
    return cur_path
1405
class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    # lookups. Instead any nodes that don't match in all trees could be
    # marked as 'deferred', and then returned in the final cleanup loop.
    # For now, I think it is "nicer" to return things as close to the
    # "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()

    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            # Use the next() builtin: the Python 2-only iterator.next()
            # method does not exist on Python 3.
            path, ie = next(iterator)
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, str):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, str):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        # cmp() was removed in Python 3; (a > b) - (a < b) is the standard
        # replacement idiom and returns the same -1/0/1-style result.
        key1 = MultiWalker._path_to_key(path1)
        key2 = MultiWalker._path_to_key(path2)
        return (key1 > key2) - (key1 < key2)

    @staticmethod
    def _path_to_key(path):
        # Key is (list of directory parts, basename): siblings group
        # together and children of a directory sort after that directory.
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)

    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the
            entry yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            # NOTE(review): assumes other_tree exposes a root_inventory
            # mapping indexed by file_id — confirm against tree API.
            cur_ie = other_tree.root_inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for _ in range(len(self._other_trees))]

        master_has_more = True
        # Bind frequently-used lookups to locals for the hot loop.
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(
                    other_entries):
                if not other_has_more:
                    # This walker is exhausted; the entry can only be found
                    # out of order (or not at all).
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        # Stash entries we pass by; they may match a later
                        # master entry out of order.
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again.
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            # dict.itervalues() is Python 2 only; .values() works on both.
            others = sorted(other_extra.values(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                # Earlier trees have already been fully processed, so they
                # contribute empty placeholders.
                other_values = [(None, None) for _ in range(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx in range(idx + 1, len(self._others_extra)):
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values