# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tree classes, representing directory at point in time."""

from __future__ import absolute_import

from .lazy_import import lazy_import
lazy_import(globals(), """
    conflicts as _mod_conflicts,
    revision as _mod_revision,
from breezy.bzr import (
from breezy.i18n import gettext

from .inter import InterObject


class FileTimestampUnavailable(errors.BzrError):

    _fmt = "The file timestamp for %(path)s is not available."

    def __init__(self, path):

    """Abstract file tree.

    There are several subclasses:

    * `WorkingTree` exists as files on disk editable by the user.

    * `RevisionTree` is a tree as recorded at some point in the past.

    Trees can be compared, etc, regardless of whether they are working
    trees or versioned trees.

    def has_versioned_directories(self):
        """Whether this tree can contain explicitly versioned directories.

        This defaults to True, but some implementations may want to override

    def changes_from(self, other, want_unchanged=False, specific_files=None,
            extra_trees=None, require_versioned=False, include_root=False,
            want_unversioned=False):
        """Return a TreeDelta of the changes from other to this tree.

        :param other: A tree to compare with.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children of
            matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True, all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be raised.
        :param want_unversioned: Scan for unversioned paths.

        The comparison will be performed by an InterTree object looked up on

        # Martin observes that Tree.changes_from returns a TreeDelta and this
        # may confuse people, because the class name of the returned object is
        # a synonym of the object referenced in the method name.
        return InterTree.get(other, self).compare(
            want_unchanged=want_unchanged,
            specific_files=specific_files,
            extra_trees=extra_trees,
            require_versioned=require_versioned,
            include_root=include_root,
            want_unversioned=want_unversioned,
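
        # Illustrative usage sketch (not part of the original source): a
        # caller holding two trees would typically write something like
        #
        #   delta = new_tree.changes_from(old_tree, specific_files=['doc'])
        #
        # and then inspect the returned TreeDelta.  The delta attribute names
        # (added, removed, renamed, modified, ...) are assumed here from how
        # TreeDelta is used elsewhere in breezy.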

    def iter_changes(self, from_tree, include_unchanged=False,
            specific_files=None, pb=None, extra_trees=None,
            require_versioned=True, want_unversioned=False):
        """See InterTree.iter_changes"""
        intertree = InterTree.get(from_tree, self)
        return intertree.iter_changes(include_unchanged, specific_files, pb,
            extra_trees, require_versioned, want_unversioned=want_unversioned)
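
        # Hedged sketch of consuming the wrapper above: each yielded item is
        # the iter_changes tuple documented on InterTree.iter_changes; the
        # loop variable names below are illustrative only.
        #
        #   for (file_id, (old_path, new_path), changed_content, versioned,
        #        parent, name, kind, executable) in tree.iter_changes(basis):
        #       if changed_content:
        #           print(new_path or old_path)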

        """Get a list of the conflicts in the tree.

        Each conflict is an instance of breezy.conflicts.Conflict.
        return _mod_conflicts.ConflictList()

        """For trees that can have unversioned files, return all such paths."""

    def get_parent_ids(self):
        """Get the parent ids for this tree.

        :return: a list of parent ids. [] is returned to indicate
            a tree with no parents.
        :raises: BzrError if the parents are not known.
        raise NotImplementedError(self.get_parent_ids)

    def has_filename(self, filename):
        """True if the tree has given filename."""
        raise NotImplementedError(self.has_filename)

    def has_id(self, file_id):
        raise NotImplementedError(self.has_id)

    def has_or_had_id(self, file_id):
        raise NotImplementedError(self.has_or_had_id)

    def is_ignored(self, filename):
        """Check whether the filename is ignored by this tree.

        :param filename: The relative filename within the tree.
        :return: True if the filename is ignored.

    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        raise NotImplementedError(self.all_file_ids)

    def id2path(self, file_id):
        """Return the path for a file id.
        raise NotImplementedError(self.id2path)

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the

        For example, in the tree::

        The yield order (ignoring root) would be::

            a, f, a/b, a/d, a/b/c, a/d/e, f/g

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        raise NotImplementedError(self.iter_entries_by_dir)
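
        # Illustrative sketch, not part of the original API docs: collecting
        # the 'by_dir' ordering into a plain list of paths for any concrete
        # Tree implementation.
        #
        #   paths = [path for path, entry in tree.iter_entries_by_dir()]
        #   # for the example tree above, paths (ignoring root) would be
        #   # ['a', 'f', 'a/b', 'a/d', 'a/b/c', 'a/d/e', 'f/g']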

    def iter_child_entries(self, file_id, path=None):
        """Iterate over the children of a directory or tree reference.

        :param file_id: File id of the directory/tree-reference
        :param path: Optional path of the directory
        :raise NoSuchId: When the file_id does not exist
        :return: Iterator over entries in the directory
        raise NotImplementedError(self.iter_child_entries)

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        """List all files in this tree.

        :param include_root: Whether to include the entry for the tree root
        :param from_dir: Directory under which to list files
        :param recursive: Whether to list files recursively
        :return: iterator over tuples of (path, versioned, kind, file_id,
        raise NotImplementedError(self.list_files)

    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
                                  % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        All the attributes returned are for the canonical form, not the
        convenient form (if content filters are in use.)

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file and the size of the
            canonical form can be cheaply determined, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
            sha1-or-link is the link target if kind is symlink, or the sha1 if
            it can be obtained without reading the file.
        raise NotImplementedError(self.path_content_summary)

    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
                                  % self.__class__.__name__)

    def _comparison_data(self, entry, path):
        """Return a tuple of kind, executable, stat_value for a file.

        entry may be None if there is no inventory entry for the file, but
        path must always be supplied.

        kind is None if there is no file present (even if an inventory id is
        present). executable is False for non-file entries.
        raise NotImplementedError(self._comparison_data)

    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)

    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        raise NotImplementedError(self.get_file)

    def get_file_with_stat(self, file_id, path=None):
        """Get a file handle and stat object for file_id.

        The default implementation returns (self.get_file, None) for backwards

        :param file_id: The file id to read.
        :param path: The path of the file, if it is known.
        :return: A tuple (file_handle, stat_value_or_None). If the tree has
            no stat facility, or no need for stat cache feedback during commit,
            it may return None for the second element of the tuple.
        return (self.get_file(file_id, path), None)

    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use

        :returns: A single byte string for the whole file.
        my_file = self.get_file(file_id, path)
        return my_file.read()

    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        return osutils.split_lines(self.get_file_text(file_id, path))

    def get_file_verifier(self, file_id, path=None, stat_value=None):
        """Return a verifier for a file.

        The default implementation returns a sha1.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        :param stat_value: Optional stat value for the object
        :return: Tuple with verifier name and verifier data
        return ("SHA1", self.get_file_sha1(file_id, path=path,
            stat_value=stat_value))

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        """Return the SHA1 hash of a file.

        :note: callers should use get_file_verifier instead
            where possible, as the underlying repository implementation may
            have quicker access to a non-sha1 verifier.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        :param stat_value: Optional stat value for the object
        raise NotImplementedError(self.get_file_sha1)

    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        raise NotImplementedError(self.get_file_mtime)

    def get_file_size(self, file_id):
        """Return the size of a file in bytes.

        This applies only to regular files. If invoked on directories or
        symlinks, it will return None.
        :param file_id: The file-id of the file
        raise NotImplementedError(self.get_file_size)

    def is_executable(self, file_id, path=None):
        """Check if a file is executable.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        raise NotImplementedError(self.is_executable)

    def iter_files_bytes(self, desired_files):
        """Iterate through file contents.

        Files will not necessarily be returned in the order they occur in
        desired_files. No specific order is guaranteed.

        Yields pairs of identifier, bytes_iterator. identifier is an opaque
        value supplied by the caller as part of desired_files. It should
        uniquely identify the file version in the caller's context. (Examples:
        an index number or a TreeTransform trans_id.)

        bytes_iterator is an iterable of bytestrings for the file. The
        kind of iterable and length of the bytestrings are unspecified, but for
        this implementation, it is a tuple containing a single bytestring with
        the complete text of the file.

        :param desired_files: a list of (file_id, identifier) pairs
        for file_id, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings. (Technically, a bytestring is also an iterable
            # of bytestrings, but iterating through each character is not
            cur_file = (self.get_file_text(file_id),)
            yield identifier, cur_file
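
        # Hedged usage sketch: callers usually pass an opaque identifier
        # (here the path, purely as an example) and re-join the chunks.
        #
        #   wanted = [(file_id, path) for path, file_id in interesting_files]
        #   for path, chunks in tree.iter_files_bytes(wanted):
        #       text = b''.join(chunks)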

    def get_symlink_target(self, file_id, path=None):
        """Get the target for a given file_id.

        It is assumed that the caller already knows that file_id is referencing

        :param file_id: Handle for the symlink entry.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        :return: The path the symlink points to.
        raise NotImplementedError(self.get_symlink_target)

    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
            default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
        raise NotImplementedError(self.annotate_iter)
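
        # Illustrative only: a concrete subclass's annotate_iter would be
        # consumed like this, with 'current:' marking uncommitted lines in
        # mutable trees.
        #
        #   for revision_id, line in tree.annotate_iter(file_id):
        #       print(revision_id, line)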

    def _get_plan_merge_data(self, file_id, other, base):
        from .bzr import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        last_revision_base = None
        last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,

    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,

    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                self.get_file_lines(file_id))
            repo = self.branch.repository
            last_revision = self.get_file_revision(file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)

    def _check_retrieved(self, ie, f):
        fp = osutils.fingerprint_file(f)
        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise errors.BzrError(
                    "mismatched size for file %r in %r" %
                    (ie.file_id, self._store),
                    ["inventory expects %d bytes" % ie.text_size,
                     "file is actually %d bytes" % fp['size'],
                     "store is probably damaged/corrupt"])
        if ie.text_sha1 != fp['sha1']:
            raise errors.BzrError("wrong SHA-1 for file %r in %r" %
                (ie.file_id, self._store),
                ["inventory expects %s" % ie.text_sha1,
                 "file is actually %s" % fp['sha1'],
                 "store is probably damaged/corrupt"])

    def path2id(self, path):
        """Return the id for path in this tree."""
        raise NotImplementedError(self.path2id)

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this tree
        are all returned, even if none exist under a provided path in this
        tree, and vice versa.

        :param paths: An iterable of paths to start converting to ids from.
            Alternatively, if paths is None, no ids should be calculated and None
            will be returned. This is offered to make calling the API unconditional
            for code that *might* take a list of files.
        :param trees: Additional trees to consider.
        :param require_versioned: If False, do not raise NotVersionedError if
            an element of paths is not versioned in this tree and all of trees.
        return find_ids_across_trees(paths, [self] + list(trees), require_versioned)

    def iter_children(self, file_id):
        """Iterate over the file ids of the children of an entry.

        :param file_id: File id of the entry
        :return: Iterator over child file ids.
        raise NotImplementedError(self.iter_children)

        """Lock this tree for multiple read only operations.

        :return: A breezy.lock.LogicalLockResult.
        return lock.LogicalLockResult(self.unlock)

    def revision_tree(self, revision_id):
        """Obtain a revision tree for the revision revision_id.

        The intention of this method is to allow access to possibly cached
        tree data. Implementors of this method should raise NoSuchRevision if
        the tree is not locally available, even if they could obtain the
        tree via a repository or some other means. Callers are responsible
        for finding the ultimate source for a revision tree.

        :param revision_id: The revision_id of the requested tree.
        :raises: NoSuchRevision if the tree cannot be obtained.
        raise errors.NoSuchRevisionInTree(self, revision_id)

        """What files are present in this tree and unknown.

        :return: an iterator over the unknown files.

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        raise NotImplementedError(self.filter_unversioned_files)

    def walkdirs(self, prefix=""):
        """Walk the contents of this tree from path down.

        This yields all the data about the contents of a directory at a time.
        After each directory has been yielded, if the caller has mutated the
        list to exclude some directories, they are then not descended into.

        The data yielded is of the form:
        ((directory-relpath, directory-path-from-root, directory-fileid),
        [(relpath, basename, kind, lstat, path_from_tree_root, file_id,
          versioned_kind), ...]),
        - directory-relpath is the containing dir's relpath from prefix
        - directory-path-from-root is the containing dir's path from /
        - directory-fileid is the id of the directory if it is versioned.
        - relpath is the relative path within the subtree being walked.
        - basename is the last path component of relpath
        - kind is the kind of the file now. If unknown then the file is not
          present within the tree - but it may be recorded as versioned. See
        - lstat is the stat data *if* the file was statted.
        - path_from_tree_root is the path from the root of the tree.
        - file_id is the file_id if the entry is versioned.
        - versioned_kind is the kind of the file as last recorded in the
          versioning system. If 'unknown' the file is not versioned.
        One of 'kind' and 'versioned_kind' must not be 'unknown'.

        :param prefix: Start walking from prefix within the tree rather than
            at the root. This allows one to walk a subtree but get paths that are
            relative to a tree rooted higher up.
        :return: an iterator over the directory data.
        raise NotImplementedError(self.walkdirs)
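
        # Hedged example of the pruning behaviour described above: removing
        # entries from the yielded list stops descent into those directories.
        # The tuple layout follows the docstring; names are illustrative.
        #
        #   for (dir_relpath, dir_abspath, dir_id), entries in tree.walkdirs():
        #       entries[:] = [e for e in entries if e[1] != 'build']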

    def supports_content_filtering(self):

    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
        path = self.id2path(file_id)
        prefs = next(self.iter_search_rules([path], filter_pref_names))
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            trace.note(gettext("*** {0} content-filter: {1} => {2!r}").format(
                path, prefs, stk))

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        if self.supports_content_filtering():
            return lambda path, file_id: \
                self._content_filter_stack(path, file_id)

    def iter_search_rules(self, path_names, pref_names=None,
            _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
            Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
            See _RulesSearcher.get_items for details on the tuple sequence.
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            for path in path_names:
                yield searcher.get_items(path)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher


def find_ids_across_trees(filenames, trees, require_versioned=True):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, and all children of matched
    directories will be used.

    :param filenames: The filenames to find file_ids for (if None, returns
    :param trees: The trees to find file_ids within
    :param require_versioned: if True, all specified filenames must occur in
    :return: a set of file ids for the specified filenames and their children.
    specified_path_ids = _find_ids_across_trees(filenames, trees,
    return _find_children_across_trees(specified_path_ids, trees)
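
# Illustrative sketch (assumption, not from the original source): mapping a
# couple of user-supplied paths to file ids across a working tree and its
# basis tree.
#
#   ids = find_ids_across_trees(['doc/index.txt', 'src'],
#                               [work_tree, basis_tree],
#                               require_versioned=False)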


def _find_ids_across_trees(filenames, trees, require_versioned):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, but subdirectories are not scanned.

    :param filenames: The filenames to find file_ids for
    :param trees: The trees to find file_ids within
    :param require_versioned: if True, all specified filenames must occur in
    :return: a set of file ids for the specified filenames
    interesting_ids = set()
    for tree_path in filenames:
        file_id = tree.path2id(tree_path)
        if file_id is not None:
            interesting_ids.add(file_id)
        not_versioned.append(tree_path)
    if len(not_versioned) > 0 and require_versioned:
        raise errors.PathsNotVersionedError(not_versioned)
    return interesting_ids


def _find_children_across_trees(specified_ids, trees):
    """Return a set including specified ids and their children.

    All matches in all trees will be used.

    :param trees: The trees to find file_ids within
    :return: a set containing all specified ids and their children
    interesting_ids = set(specified_ids)
    pending = interesting_ids
    # now handle children of interesting ids
    # we loop so that we handle all children of each id in both trees
    while len(pending) > 0:
        for file_id in pending:
            if not tree.has_or_had_id(file_id):
            for child_id in tree.iter_children(file_id):
                if child_id not in interesting_ids:
                    new_pending.add(child_id)
        interesting_ids.update(new_pending)
        pending = new_pending
    return interesting_ids


class InterTree(InterObject):
    """This class represents operations taking place between two Trees.

    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.

    Clients of breezy should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.compare()' which
    will pass through to InterTree as appropriate.

    # Formats that will be used to test this InterTree. If both are
    # None, this InterTree will not be tested (e.g. because a complex
    _matching_from_tree_format = None
    _matching_to_tree_format = None

    def is_compatible(kls, source, target):
        # The default implementation is naive and uses the public API, so
        # it works for all trees.

    def _changes_from_entries(self, source_entry, target_entry,
            source_path=None, target_path=None):
        """Generate an iter_changes tuple between source_entry and target_entry.

        :param source_entry: An inventory entry from self.source, or None.
        :param target_entry: An inventory entry from self.target, or None.
        :param source_path: The path of source_entry, if known. If not known
            it will be looked up.
        :param target_path: The path of target_entry, if known. If not known
            it will be looked up.
        :return: A tuple, item 0 of which is an iter_changes result tuple, and
            item 1 is True if there are any changes in the result tuple.
        if source_entry is None:
            if target_entry is None:
            file_id = target_entry.file_id
            file_id = source_entry.file_id
        if source_entry is not None:
            source_versioned = True
            source_name = source_entry.name
            source_parent = source_entry.parent_id
            if source_path is None:
                source_path = self.source.id2path(file_id)
            source_kind, source_executable, source_stat = \
                self.source._comparison_data(source_entry, source_path)
            source_versioned = False
            source_executable = None
        if target_entry is not None:
            target_versioned = True
            target_name = target_entry.name
            target_parent = target_entry.parent_id
            if target_path is None:
                target_path = self.target.id2path(file_id)
            target_kind, target_executable, target_stat = \
                self.target._comparison_data(target_entry, target_path)
            target_versioned = False
            target_executable = None
        versioned = (source_versioned, target_versioned)
        kind = (source_kind, target_kind)
        changed_content = False
        if source_kind != target_kind:
            changed_content = True
        elif source_kind == 'file':
            if not self.file_content_matches(file_id, file_id, source_path,
                    target_path, source_stat, target_stat):
                changed_content = True
        elif source_kind == 'symlink':
            if (self.source.get_symlink_target(file_id) !=
                    self.target.get_symlink_target(file_id)):
                changed_content = True
        elif source_kind == 'tree-reference':
            if (self.source.get_reference_revision(file_id, source_path)
                    != self.target.get_reference_revision(file_id, target_path)):
                changed_content = True
        parent = (source_parent, target_parent)
        name = (source_name, target_name)
        executable = (source_executable, target_executable)
        if (changed_content is not False or versioned[0] != versioned[1]
                or parent[0] != parent[1] or name[0] != name[1] or
                executable[0] != executable[1]):
        return (file_id, (source_path, target_path), changed_content,
            versioned, parent, name, kind, executable), changes

    def compare(self, want_unchanged=False, specific_files=None,
            extra_trees=None, require_versioned=False, include_root=False,
            want_unversioned=False):
        """Return the changes from source to target.

        :return: A TreeDelta.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children of
            matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True, all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be raised.
        :param want_unversioned: Scan for unversioned paths.
        trees = (self.source,)
        if extra_trees is not None:
            trees = trees + tuple(extra_trees)
        with self.lock_read():
            # target is usually the newer tree:
            specific_file_ids = self.target.paths2ids(specific_files, trees,
                require_versioned=require_versioned)
            if specific_files and not specific_file_ids:
                # All files are unversioned, so just return an empty delta
                # _compare_trees would think we want a complete delta
                result = delta.TreeDelta()
                fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
                result.unversioned = [(path, None,
                    self.target._comparison_data(fake_entry, path)[0]) for path in
            return delta._compare_trees(self.source, self.target, want_unchanged,
                specific_files, include_root, extra_trees=extra_trees,
                require_versioned=require_versioned,
                want_unversioned=want_unversioned)

    def iter_changes(self, include_unchanged=False,
            specific_files=None, pb=None, extra_trees=[],
            require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

            (file_id, (path_in_source, path_in_target),
             changed_content, versioned, parent, name, kind,

        Changed_content is True if the file's content has changed. This
        includes changes to its kind, and to a symlink's target.

        versioned, parent, name, kind, executable are tuples of (from, to).
        If a file is missing in a tree, its kind is None.

        Iteration is done in parent-to-child order, relative to the target

        There is no guarantee that all paths are in sorted order: the
        requirement to expand the search due to renames may result in children
        that should be found early being found late in the search, after
        lexically later results have been returned.
        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included. The parents in the target tree
            of the specific files up to and including the root of the tree are
            always evaluated for changes too.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        lookup_trees = [self.source]
        lookup_trees.extend(extra_trees)
        # The ids of items we need to examine to ensure delta consistency.
        precise_file_ids = set()
        changed_file_ids = []
        if specific_files == []:
            specific_file_ids = []
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if specific_files is not None:
            # reparented or added entries must have their parents included
            # so that valid deltas can be created. The seen_parents set
            # tracks the parents that we need to have.
            # The seen_dirs set tracks directory entries we've yielded.
            # After outputting version object in to_entries we set difference
            # the two seen sets and start checking parents.
        all_unversioned = sorted([(p.split('/'), p) for p in
            if specific_files is None or
                osutils.is_inside_any(specific_files, p)])
        all_unversioned = collections.deque(all_unversioned)
        all_unversioned = collections.deque()
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable bit values when execute is not supported.
        fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                    all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path, source_entry = from_data.get(target_entry.file_id,
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, unversioned_path[0][-1]),
                (None, to_executable))
        # Yield all remaining source paths
        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
            if not self.target.has_id(file_id):
                # common case - paths we have not emitted are not present in
            to_path = self.target.id2path(file_id)
            pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            changed_file_ids.append(file_id)
            yield (file_id, (path, to_path), changed_content, versioned, parent,
                name, kind, executable)
        changed_file_ids = set(changed_file_ids)
        if specific_file_ids is not None:
            for result in self._handle_precise_ids(precise_file_ids,

    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        inventory = tree.root_inventory
        except NotImplementedError:
            # No inventory available.
            iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
            return next(iterator)[1]
            except StopIteration:
        return inventory[file_id]
        except errors.NoSuchId:

    def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
            discarded_changes=None):
        """Fill out a partial iter_changes to be consistent.

        :param precise_file_ids: The file ids of parents that were seen during
        :param changed_file_ids: The file ids of already emitted items.
        :param discarded_changes: An optional dict of precalculated
            iter_changes items which the partial iter_changes had not output
        :return: A generator of iter_changes items to output.
        # process parents of things that had changed under the user's
        # requested paths to prevent incorrect paths or parent ids which
        # aren't in the tree.
        while precise_file_ids:
            precise_file_ids.discard(None)
            # Don't emit file_ids twice
            precise_file_ids.difference_update(changed_file_ids)
            if not precise_file_ids:
            # If there was something at a given output path in source, we
            # have to include the entry from source in the delta, or we would
            # be putting this entry into a used path.
            for parent_id in precise_file_ids:
                paths.append(self.target.id2path(parent_id))
                except errors.NoSuchId:
                    # This id has been dragged in from the source by delta
                    # expansion and isn't present in target at all: we don't
                    # need to check for path collisions on it.
                old_id = self.source.path2id(path)
                precise_file_ids.add(old_id)
            precise_file_ids.discard(None)
            current_ids = precise_file_ids
            precise_file_ids = set()
            # We have to emit all of precise_file_ids that have been altered.
            # We may have to output the children of some of those ids if any
            # directories have stopped being directories.
            for file_id in current_ids:
                if discarded_changes:
                    result = discarded_changes.get(file_id)
                old_entry = self._get_entry(self.source, file_id)
                new_entry = self._get_entry(self.target, file_id)
                result, changes = self._changes_from_entries(
                    old_entry, new_entry)
                # Get this parent's parent to examine.
                new_parent_id = result[4][1]
                precise_file_ids.add(new_parent_id)
                if (result[6][0] == 'directory' and
                        result[6][1] != 'directory'):
                    # This stopped being a directory, the old children have
                    if old_entry is None:
                        # Reusing a discarded change.
                        old_entry = self._get_entry(self.source, file_id)
                    precise_file_ids.update(
                        self.source.iter_children(file_id))
                changed_file_ids.add(result[0])

    def file_content_matches(
            self, source_file_id, target_file_id, source_path=None,
            target_path=None, source_stat=None, target_stat=None):
        """Check if two files are the same in the source and target trees.

        This only checks that the contents of the files are the same,
        it does not touch anything else.

        :param source_file_id: File id of the file in the source tree
        :param target_file_id: File id of the file in the target tree
        :param source_path: Path of the file in the source tree
        :param target_path: Path of the file in the target tree
        :param source_stat: Optional stat value of the file in the source tree
        :param target_stat: Optional stat value of the file in the target tree
        :return: Boolean indicating whether the files have the same contents
        with self.lock_read():
            source_verifier_kind, source_verifier_data = (
                self.source.get_file_verifier(
                    source_file_id, source_path, source_stat))
            target_verifier_kind, target_verifier_data = (
                self.target.get_file_verifier(
                    target_file_id, target_path, target_stat))
            if source_verifier_kind == target_verifier_kind:
                return (source_verifier_data == target_verifier_data)
            # Fall back to SHA1 for now
            if source_verifier_kind != "SHA1":
                source_sha1 = self.source.get_file_sha1(
                    source_file_id, source_path, source_stat)
            source_sha1 = source_verifier_data
            if target_verifier_kind != "SHA1":
                target_sha1 = self.target.get_file_sha1(
                    target_file_id, target_path, target_stat)
            target_sha1 = target_verifier_data
            return (source_sha1 == target_sha1)


InterTree.register_optimiser(InterTree)
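
# Hedged sketch of how a specialised optimiser would be registered, mirroring
# the default registration above.  OptimisedInterTree and MyTree are purely
# illustrative names.
#
#   class OptimisedInterTree(InterTree):
#       @classmethod
#       def is_compatible(klass, source, target):
#           return isinstance(source, MyTree) and isinstance(target, MyTree)
#
#   InterTree.register_optimiser(OptimisedInterTree)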


class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    # lookups. Instead any nodes that don't match in all trees could be
    # marked as 'deferred', and then returned in the final cleanup loop.
    # For now, I think it is "nicer" to return things as close to the
    # "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
        :param other_trees: A list of other trees to walk simultaneously.
        self._master_tree = master_tree
        self._other_trees = other_trees
        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()
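
        # Hedged usage sketch: walking a basis tree alongside two revision
        # trees.  The iter_all() entry point is assumed from the walker
        # methods below; other_values holds one (path, entry) pair per other
        # tree, or (None, None) where a tree has no such file.
        #
        #   walker = MultiWalker(basis_tree, [tree_a, tree_b])
        #   for path, file_id, master_ie, other_values in walker.iter_all():
        #       ...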

    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        path, ie = next(iterator)
        except StopIteration:
            return False, None, None
        return True, path, ie

    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        # Shortcut this special case
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
            MultiWalker._path_to_key(path2))

    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
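
        # Illustrative check of the ordering this key produces: children of a
        # directory sort after all of that directory's siblings.
        #
        #   sorted(['a/b', 'a', 'f'], key=MultiWalker._path_to_key)
        #   # => ['a', 'f', 'a/b']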

    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
        :param other_tree: The Tree to search, in case we didn't find the entry
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
        if cur_path is None:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.root_inventory[file_id]
            return (cur_path, cur_ie)

        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
        self._finish_others()
        for result in self._walk_others():

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        master_iterator = self._master_tree.iter_entries_by_dir()
        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for _ in range(len(self._other_trees))]
        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed
        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
            file_id = master_ie.file_id
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                # This walker did not match, step it until it either
                # matches, or we know we are past the current walker.
                other_walker = other_walkers[idx]
                other_extra = others_extra[idx]
                while (other_has_more and
                        self._cmp_path_by_dirblock(other_path, path) < 0):
                    other_file_id = other_ie.file_id
                    if other_file_id not in out_of_order_processed:
                        other_extra[other_file_id] = (other_path, other_ie)
                    other_has_more, other_path, other_ie = \
                        step_one(other_walker)
                if other_has_more and other_ie.file_id == file_id:
                    # We ended up walking to this point, match and step
                    other_values_append((other_path, other_ie))
                    other_has_more, other_path, other_ie = \
                        step_one(other_walker)
                # This record isn't in the normal order, see if it
                other_values_append(lookup_by_file_id(
                    other_extra, self._other_trees[idx], file_id))
                next_other_entries_append((other_has_more, other_path,
            other_entries = next_other_entries
            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(viewvalues(other_extra),
                key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None)] * idx
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values