"""Tree classes, representing directory at a point in time.
"""

from __future__ import absolute_import

from collections import deque

from .lazy_import import lazy_import
lazy_import(globals(), """
from breezy import (
    conflicts as _mod_conflicts,
    errors,
    lock,
    osutils,
    revision as _mod_revision,
    )
from breezy.i18n import gettext
""")

from .inter import InterObject


class FileTimestampUnavailable(errors.BzrError):

    _fmt = "The file timestamp for %(path)s is not available."

    def __init__(self, path):
        self.path = path


class MissingNestedTree(errors.BzrError):

    _fmt = "The nested tree for %(path)s cannot be resolved."

    def __init__(self, path):
        self.path = path


class TreeEntry(object):
    """An entry that implements the minimum interface used by commands."""

    def __eq__(self, other):
        # yes, this is ugly, TODO: best practice __eq__ style.
        return (isinstance(other, TreeEntry)
                and other.__class__ == self.__class__)

    def kind_character(self):
        return "???"

    def is_unmodified(self, other):
        """Does this entry reference the same entry?

        This is mostly the same as __eq__, but returns False
        for entries without enough information (i.e. revision is None).
        """
        return False


class TreeDirectory(TreeEntry):
    """See TreeEntry. This is a directory in a working tree."""

    kind = 'directory'

    def kind_character(self):
        return "/"


class TreeFile(TreeEntry):
    """See TreeEntry. This is a regular file in a working tree."""

    kind = 'file'

    def kind_character(self):
        return ''


class TreeLink(TreeEntry):
    """See TreeEntry. This is a symlink in a working tree."""

    kind = 'symlink'

    def kind_character(self):
        return ''


class TreeReference(TreeEntry):
    """See TreeEntry. This is a reference to a nested tree in a working tree."""

    kind = 'tree-reference'

    def kind_character(self):
        return '+'


class TreeChange(object):
    """Describes the changes between the same item in two different trees."""

    __slots__ = ['file_id', 'path', 'changed_content', 'versioned', 'parent_id',
                 'name', 'kind', 'executable', 'copied']

    def __init__(self, file_id, path, changed_content, versioned, parent_id,
                 name, kind, executable, copied=False):
        self.file_id = file_id
        self.path = path
        self.changed_content = changed_content
        self.versioned = versioned
        self.parent_id = parent_id
        self.name = name
        self.kind = kind
        self.executable = executable
        self.copied = copied

    def __repr__(self):
        return "%s%r" % (self.__class__.__name__, self._as_tuple())

    def __len__(self):
        return len(self.__slots__)

    def _as_tuple(self):
        return (self.file_id, self.path, self.changed_content, self.versioned,
                self.parent_id, self.name, self.kind, self.executable,
                self.copied)

    def __eq__(self, other):
        if isinstance(other, TreeChange):
            return self._as_tuple() == other._as_tuple()
        if isinstance(other, tuple):
            return self._as_tuple() == other
        return False

    def __lt__(self, other):
        return self._as_tuple() < other._as_tuple()

    def meta_modified(self):
        if self.versioned == (True, True):
            return (self.executable[0] != self.executable[1])
        return False

    def is_reparented(self):
        return self.parent_id[0] != self.parent_id[1]

    def discard_new(self):
        return self.__class__(
            self.file_id, (self.path[0], None), self.changed_content,
            (self.versioned[0], None), (self.parent_id[0], None),
            (self.name[0], None), (self.kind[0], None),
            (self.executable[0], None), copied=False)
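

# A minimal usage sketch: every field of a TreeChange holds an (old, new) pair.
# The values below are hypothetical and only illustrate the calling convention.
def _example_tree_change():
    change = TreeChange(
        b'file-id', ('docs/old.txt', 'docs/new.txt'), False, (True, True),
        (b'root-id', b'root-id'), ('old.txt', 'new.txt'), ('file', 'file'),
        (False, True))
    # meta_modified() is True because only the executable bit changed;
    # discard_new() masks out all information about the new side.
    return change.meta_modified(), change.discard_new()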


class Tree(object):
    """Abstract file tree."""

    def iter_entries_by_dir(self, specific_files=None, recurse_nested=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        Entries are grouped by directory; all entries in a directory are
        yielded together, after the entry for the directory itself.

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g

        If recurse_nested is enabled then nested trees are included as if
        they were a part of the tree. If it is disabled then TreeReference
        objects (without any children) are yielded.
        """
        raise NotImplementedError(self.iter_entries_by_dir)

    def iter_child_entries(self, path):
        """Iterate over the children of a directory or tree reference.

        :param path: Path of the directory
        :raise NoSuchFile: When the path does not exist
        :return: Iterator over entries in the directory
        """
        raise NotImplementedError(self.iter_child_entries)

    def list_files(self, include_root=False, from_dir=None, recursive=True,
                   recurse_nested=False):
        """List all files in this tree.

        :param include_root: Whether to include the entry for the tree root
        :param from_dir: Directory under which to list files
        :param recursive: Whether to list files recursively
        :param recurse_nested: enter nested trees
        :return: iterator over tuples of
            (path, versioned, kind, inventory entry)
        """
        raise NotImplementedError(self.list_files)

    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path

    def get_containing_nested_tree(self, path):
        """Find the nested tree that contains a path.

        :return: tuple with (nested tree and path inside the nested tree)
        """
        for nested_path in self.iter_references():
            if path.startswith(nested_path):
                nested_tree = self.get_nested_tree(nested_path)
                return nested_tree, path[len(nested_path):]
        return None, None

    def get_nested_tree(self, path):
        """Open the nested tree at the specified path.

        :param path: Path from which to resolve tree reference.
        :return: A Tree object for the nested tree
        :raise MissingNestedTree: If the nested tree can not be resolved
        """
        raise NotImplementedError(self.get_nested_tree)

    def kind(self, path):
        raise NotImplementedError("Tree subclass %s must implement kind"
                                  % self.__class__.__name__)

    def stored_kind(self, path):
        """File kind stored for this path.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(path)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        :param path: A relative path within the tree.
        :return: A tuple of (kind, size, executable, sha1_or_link).
        """
        raise NotImplementedError(self.path_content_summary)

    def _comparison_data(self, entry, path):
        """Return a (kind, executable, stat_value) tuple for a path.

        entry may be None if there is no inventory entry for the path;
        path must always be supplied.
        """
        raise NotImplementedError(self._comparison_data)

    def get_file(self, path):
        """Return a file object for the file path in the tree.
        """
        raise NotImplementedError(self.get_file)

    def get_file_with_stat(self, path):
        """Get a file handle and stat object for path.

        The default implementation returns (self.get_file, None) for backwards
        compatibility.

        :param path: The path of the file.
        :return: A tuple (file_handle, stat_value_or_None). If the tree has
            no stat facility, or need for a stat cache feedback during commit,
            it may return None for the second element of the tuple.
        """
        return (self.get_file(path), None)

    def get_file_text(self, path):
        """Return the byte content of a file.

        :param path: The path of the file.
        :returns: A single byte string for the whole file.
        """
        with self.get_file(path) as my_file:
            return my_file.read()

    def get_file_lines(self, path):
        """Return the content of a file, as lines.

        :param path: The path of the file.
        """
        return osutils.split_lines(self.get_file_text(path))

    def get_file_verifier(self, path, stat_value=None):
        """Return a verifier for a file.

        The default implementation returns a sha1.

        :param path: The path that this file can be found at.
        :param stat_value: Optional stat value for the object
        :return: Tuple with verifier name and verifier data
        """
        return ("SHA1", self.get_file_sha1(path, stat_value=stat_value))

    def get_file_sha1(self, path, stat_value=None):
        """Return the SHA1 hash of a file.

        :note: callers should use get_file_verifier instead
            where possible, as the underlying repository implementation may
            have quicker access to a non-sha1 verifier.

        :param path: The path that this file can be found at.
        :param stat_value: Optional stat value for the object
        """
        raise NotImplementedError(self.get_file_sha1)

    def get_file_mtime(self, path):
        """Return the modification time for a file.

        :param path: The path that this file can be found at.
        """
        raise NotImplementedError(self.get_file_mtime)

    def get_file_size(self, path):
        """Return the size of a file in bytes.

        This applies only to regular files. If invoked on directories or
        symlinks, it will return None.
        """
        raise NotImplementedError(self.get_file_size)

    def is_executable(self, path):
        """Check if a file is executable.

        :param path: The path that this file can be found at.
        """
        raise NotImplementedError(self.is_executable)

    def iter_files_bytes(self, desired_files):
        """Iterate through file contents.

        Files will not necessarily be returned in the order they occur in
        desired_files.  No specific order is guaranteed.

        Yields pairs of identifier, bytes_iterator.  identifier is an opaque
        value that uniquely identifies the desired file, and bytes_iterator
        is an iterator over the file's content.  In this implementation, it
        is a tuple containing a single bytestring with the complete text of
        the file.

        :param desired_files: a list of (path, identifier) pairs
        """
        for path, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings.  (Technically, a bytestring is also an iterable
            # of bytestrings, but iterating through each character is not
            # performant.)
            cur_file = (self.get_file_text(path),)
            yield identifier, cur_file
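
    # A minimal usage sketch (hypothetical paths and identifiers): callers
    # pass opaque identifiers and join the returned chunks per identifier,
    # e.g.
    #
    #   for identifier, chunks in tree.iter_files_bytes(
    #           [('README', 'readme-id'), ('setup.py', 'setup-id')]):
    #       text = b''.join(chunks)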

    def get_symlink_target(self, path):
        """Get the target for a given path.

        It is assumed that the caller already knows that path is referencing
        a symlink.

        :param path: The path of the file.
        :return: The path the symlink points to.
        """
        raise NotImplementedError(self.get_symlink_target)

    def annotate_iter(self, path,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.

        :param path: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            it.
        """
        raise NotImplementedError(self.annotate_iter)

    def path2id(self, path):
        """Return the id for path in this tree."""
        raise NotImplementedError(self.path2id)

    def is_versioned(self, path):
        """Check whether path is versioned.

        :param path: Path to check
        :return: boolean
        """
        return self.path2id(path) is not None

    def find_related_paths_across_trees(self, paths, trees=[],
                                        require_versioned=True):
        """Find related paths in tree corresponding to specified filenames in any
        of them.

        All matches in all trees will be used, and all children of matched
        directories will be used.

        :param paths: The filenames to find related paths for (if None, returns
            all paths)
        :param trees: The trees to find file_ids within
        :param require_versioned: if true, all specified filenames must occur in
            at least one tree.
        :return: a set of paths for the specified filenames and their children
        """
        raise NotImplementedError(self.find_related_paths_across_trees)

    def lock_read(self):
        """Lock this tree for multiple read only operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        return lock.LogicalLockResult(self.unlock)

    def revision_tree(self, revision_id):
        """Obtain a revision tree for the revision revision_id.

        :param revision_id: The revision_id of the requested tree.
        :return: A Tree.
        :raises: NoSuchRevision if the tree cannot be obtained.
        """
        raise errors.NoSuchRevisionInTree(self, revision_id)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher

    def archive(self, format, name, root='', subdir=None,
                force_mtime=None):
        """Create an archive of this tree.

        :param format: Format name (e.g. 'tar')
        :param name: target file name
        :param root: Root directory name (or None)
        :param subdir: Subdirectory to export (or None)
        :return: Iterator over archive chunks
        """
        from .archive import create_archive
        with self.lock_read():
            return create_archive(format, self, name, root,
                                  subdir, force_mtime=force_mtime)

    @classmethod
    def versionable_kind(cls, kind):
        """Check if this tree supports versioning a specific file kind."""
        return (kind in ('file', 'directory', 'symlink', 'tree-reference'))
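

# A minimal usage sketch (hypothetical tree object and path): reading
# versioned file content through the path-based Tree API under a read lock.
def _example_read_file(tree, path='README'):
    with tree.lock_read():
        if not tree.is_versioned(path):
            return None
        return tree.get_file_text(path)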


class InterTree(InterObject):

    _optimisers = []
        seen_dirs = set()
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                      self.target.extras()
                                      if specific_files is None or
                                      osutils.is_inside_any(specific_files, p)])
            all_unversioned = deque(all_unversioned)
        else:
            all_unversioned = deque()
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_files=source_specific_files))
        from_data = dict(from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_files=target_specific_files))
        path_equivs = self.find_source_paths([p for p, e in to_entries_by_dir])
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)

        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable bit values when execute is not supported.
        fake_entry = TreeFile()
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                   all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(
                        fake_entry, unversioned_path[1])
                yield TreeChange(
                    None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path = path_equivs[target_path]
            if source_path is not None:
                source_entry = from_data.get(source_path)
            else:
                source_entry = None
            result, changes = self._changes_from_entries(
                source_entry, target_entry, source_path=source_path,
                target_path=target_path)
            to_paths[result.file_id] = result.path[1]
            entry_count += 1
            if result.versioned[0]:
                entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_files is not None:
                    precise_file_ids.add(result.parent_id[1])
                    changed_file_ids.append(result.file_id)
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result.kind[1] == 'directory':
                    seen_dirs.add(result.file_id)
                # Record parents of reparented/added entries.
                if not result.versioned[0] or result.is_reparented():
                    seen_parents.add(result.parent_id[1])

        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield TreeChange(
                None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))

            # Examine file_id
            if discarded_changes:
                result = discarded_changes.get(file_id)
                source_entry = None
            else:
                result = None
            if result is None:
                try:
                    source_path = self.source.id2path(file_id)
                except errors.NoSuchId:
                    source_path = None
                    source_entry = None
                else:
                    source_entry = self._get_entry(
                        self.source, source_path)
                try:
                    target_path = self.target.id2path(file_id)
                except errors.NoSuchId:
                    target_path = None
                    target_entry = None
                else:
                    target_entry = self._get_entry(
                        self.target, target_path)
                result, changes = self._changes_from_entries(
                    source_entry, target_entry, source_path, target_path)
            else:
                changes = True
            # Get this parent's parent to examine.
            new_parent_id = result.parent_id[1]
            precise_file_ids.add(new_parent_id)
            if changes:
                if (result.kind[0] == 'directory' and
                        result.kind[1] != 'directory'):
                    # This stopped being a directory, the old children have
                    # to be included.
                    if source_entry is None:
                        # Reusing a discarded change.
                        source_entry = self._get_entry(
                            self.source, result.path[0])
                    precise_file_ids.update(
                        child.file_id
                        for child in self.source.iter_child_entries(
                            result.path[0]))
                changed_file_ids.add(result.file_id)
                yield result

    def file_content_matches(
            self, source_path, target_path,
            source_stat=None, target_stat=None):
        """Check if two files are the same in the source and target trees.

        This only checks that the contents of the files are the same,
        it does not touch anything else.

        :param source_path: Path of the file in the source tree
        :param target_path: Path of the file in the target tree
        :param source_stat: Optional stat value of the file in the source tree
        :param target_stat: Optional stat value of the file in the target tree
        :return: Boolean indicating whether the files have the same contents
        """
        with self.lock_read():
            source_verifier_kind, source_verifier_data = (
                self.source.get_file_verifier(source_path, source_stat))
            target_verifier_kind, target_verifier_data = (
                self.target.get_file_verifier(
                    target_path, target_stat))
            if source_verifier_kind == target_verifier_kind:
                return (source_verifier_data == target_verifier_data)
            # Fall back to SHA1 for now
            if source_verifier_kind != "SHA1":
                source_sha1 = self.source.get_file_sha1(
                    source_path, source_stat)
            else:
                source_sha1 = source_verifier_data
            if target_verifier_kind != "SHA1":
                target_sha1 = self.target.get_file_sha1(
                    target_path, target_stat)
            else:
                target_sha1 = target_verifier_data
            return (source_sha1 == target_sha1)
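
    # A minimal usage sketch (hypothetical trees and paths): InterTree.get()
    # returns an optimised comparer when one is registered, e.g.
    #
    #   intertree = InterTree.get(source_tree, target_tree)
    #   same = intertree.file_content_matches('README', 'README')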

    def find_target_path(self, path, recurse='none'):
        """Find target tree path.

        :param path: Path to search for (exists in source)
        :return: path in target, or None if there is no equivalent path.
        :raise NoSuchFile: If the path doesn't exist in source
        """
        file_id = self.source.path2id(path)
        if file_id is None:
            raise errors.NoSuchFile(path)
        try:
            return self.target.id2path(file_id, recurse=recurse)
        except errors.NoSuchId:
            return None

    def find_source_path(self, path, recurse='none'):
        """Find the source tree path.

        :param path: Path to search for (exists in target)
        :return: path in source, or None if there is no equivalent path.
        :raise NoSuchFile: if the path doesn't exist in target
        """
        file_id = self.target.path2id(path)
        if file_id is None:
            raise errors.NoSuchFile(path)
        try:
            return self.source.id2path(file_id, recurse=recurse)
        except errors.NoSuchId:
            return None

    def find_target_paths(self, paths, recurse='none'):
        """Find target tree paths.

        :param paths: Iterable over paths in source to search for
        :return: Dictionary mapping from source paths to paths in target, or
            None if there is no equivalent path.
        """
        ret = {}
        for path in paths:
            ret[path] = self.find_target_path(path, recurse=recurse)
        return ret

    def find_source_paths(self, paths, recurse='none'):
        """Find source tree paths.

        :param paths: Iterable over paths in target to search for
        :return: Dictionary mapping from target paths to paths in source, or
            None if there is no equivalent path.
        """
        ret = {}
        for path in paths:
            ret[path] = self.find_source_path(path, recurse=recurse)
        return ret


InterTree.register_optimiser(InterTree)


def find_previous_paths(from_tree, to_tree, paths, recurse='none'):
    """Find previous tree paths.

    :param from_tree: From tree
    :param to_tree: To tree
    :param paths: Iterable over paths in from_tree to search for
    :return: Dictionary mapping from from_tree paths to paths in to_tree, or
        None if there is no equivalent path.
    """
    return InterTree.get(to_tree, from_tree).find_source_paths(
        paths, recurse=recurse)


def find_previous_path(from_tree, to_tree, path, recurse='none'):
    """Find previous tree path.

    :param from_tree: From tree
    :param to_tree: To tree
    :param path: Path to search for (exists in from_tree)
    :return: path in to_tree, or None if there is no equivalent path.
    :raise NoSuchFile: If the path doesn't exist in from_tree
    """
    return InterTree.get(to_tree, from_tree).find_source_path(
        path, recurse=recurse)
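

# A minimal usage sketch (hypothetical tree objects): map a path in one tree
# back to its location in an older tree, following renames where possible.
def _example_previous_path(current_tree, older_tree, path='docs/index.txt'):
    return find_previous_path(current_tree, older_tree, path)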


def get_canonical_path(tree, path, normalize):
    """Find the canonical path of an item, ignoring case.

    :param tree: Tree to traverse
    :param path: Case-insensitive path to look up
    :param normalize: Function to normalize a filename for comparison
    :return: The canonical path
    """
    cur_path = ''
    bit_iter = iter(path.split("/"))
    for elt in bit_iter:
        lelt = normalize(elt)
        new_path = None
        try:
            for child in tree.iter_child_entries(cur_path):
                try:
                    if child.name == elt:
                        # if we found an exact match, we can stop now; if
                        # we found an approximate match we need to keep
                        # searching because there might be an exact match
                        # later.
                        new_path = osutils.pathjoin(cur_path, child.name)
                        break
                    elif normalize(child.name) == lelt:
                        new_path = osutils.pathjoin(cur_path, child.name)
                except errors.NoSuchId:
                    # before a change is committed we can see this error...
                    continue
        except errors.NotADirectory:
            pass
        if new_path:
            cur_path = new_path
        else:
            # got to the end of this directory and no entries matched.
            # Return what matched so far, plus the rest as specified.
            cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
            break
    return cur_path
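

# A minimal usage sketch (hypothetical tree object): resolve a user-supplied,
# case-insensitive path to the canonical spelling stored in the tree.
def _example_canonical_path(tree, user_path='ReadMe.TXT'):
    return get_canonical_path(tree, user_path, lambda name: name.lower())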


class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()

    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)

    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values
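

# A minimal usage sketch (hypothetical tree objects): walk two trees together
# via iter_all(), pairing each master-tree entry with the matching entry in
# the other tree.
def _example_multi_walk(master_tree, other_tree):
    walker = MultiWalker(master_tree, [other_tree])
    for path, file_id, master_ie, other_values in walker.iter_all():
        other_path, other_ie = other_values[0]
        yield path, other_path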