# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# FIXME: This refactoring of the workingtree code doesn't seem to keep
18
# the WorkingTree's copy of the inventory in sync with the branch. The
19
# branch modifies its working inventory when it does a commit to make
20
# missing files permanently removed.
22
# TODO: Maybe also keep the full path of the entry, and the children?
23
# But those depend on its position within a particular inventory, and
24
# it would be nice not to need to hold the backpointer here.
26
# This should really be an id randomly assigned when the tree is
27
# created, but it's not for now.
30
from bzrlib.lazy_import import lazy_import
31
lazy_import(globals(), """
46
from bzrlib.errors import (
50
from bzrlib.trace import mutter
51
from bzrlib.static_tuple import StaticTuple
54
class InventoryEntry(object):
55
"""Description of a versioned file.
57
An InventoryEntry has the following fields, which are also
58
present in the XML inventory-entry element:
63
(within the parent directory)
66
file_id of the parent directory, or ROOT_ID
69
the revision_id in which this variation of this file was
73
Indicates that this file should be executable on systems
77
sha-1 of the text of the file
80
size in bytes of the text of the file
82
(reading a version 4 tree created a text_id field.)
87
>>> i.add(InventoryDirectory('123', 'src', ROOT_ID))
88
InventoryDirectory('123', 'src', parent_id='TREE_ROOT', revision=None)
89
>>> i.add(InventoryFile('2323', 'hello.c', parent_id='123'))
90
InventoryFile('2323', 'hello.c', parent_id='123', sha1=None, len=None, revision=None)
91
>>> shouldbe = {0: '', 1: 'src', 2: 'src/hello.c'}
92
>>> for ix, j in enumerate(i.iter_entries()):
93
... print (j[0] == shouldbe[ix], j[1])
95
(True, InventoryDirectory('TREE_ROOT', u'', parent_id=None, revision=None))
96
(True, InventoryDirectory('123', 'src', parent_id='TREE_ROOT', revision=None))
97
(True, InventoryFile('2323', 'hello.c', parent_id='123', sha1=None, len=None, revision=None))
98
>>> i.add(InventoryFile('2324', 'bye.c', '123'))
99
InventoryFile('2324', 'bye.c', parent_id='123', sha1=None, len=None, revision=None)
100
>>> i.add(InventoryDirectory('2325', 'wibble', '123'))
101
InventoryDirectory('2325', 'wibble', parent_id='123', revision=None)
102
>>> i.path2id('src/wibble')
106
>>> i.add(InventoryFile('2326', 'wibble.c', '2325'))
107
InventoryFile('2326', 'wibble.c', parent_id='2325', sha1=None, len=None, revision=None)
109
InventoryFile('2326', 'wibble.c', parent_id='2325', sha1=None, len=None, revision=None)
110
>>> for path, entry in i.iter_entries():
119
>>> i.id2path('2326')
120
'src/wibble/wibble.c'
123
# Constants returned by describe_change()
125
# TODO: These should probably move to some kind of FileChangeDescription
126
# class; that's like what's inside a TreeDelta but we want to be able to
127
# generate them just for one file at a time.
129
MODIFIED_AND_RENAMED = 'modified and renamed'
131
__slots__ = ['file_id', 'revision', 'parent_id', 'name']
133
# Attributes that all InventoryEntry instances are expected to have, but
134
# that don't vary for all kinds of entry. (e.g. symlink_target is only
135
# relevant to InventoryLink, so there's no reason to make every
136
# InventoryFile instance allocate space to hold a value for it.)
137
# Attributes that only vary for files: executable, text_sha1, text_size,
143
# Attributes that only vary for symlinks: symlink_target
144
symlink_target = None
145
# Attributes that only vary for tree-references: reference_revision
146
reference_revision = None
149
def detect_changes(self, old_entry):
150
"""Return a (text_modified, meta_modified) from this to old_entry.
152
_read_tree_state must have been called on self and old_entry prior to
153
calling detect_changes.
157
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
158
output_to, reverse=False):
159
"""Perform a diff between two entries of the same kind."""
161
def parent_candidates(self, previous_inventories):
162
"""Find possible per-file graph parents.
164
This is currently defined by:
165
- Select the last changed revision in the parent inventory.
- To deal with a short-lived bug in bzr 0.8's development, two entries
that have the same last-changed revision but different 'x' bit settings are
170
# revision:ie mapping for each ie found in previous_inventories.
172
# identify candidate head revision ids.
173
for inv in previous_inventories:
174
if self.file_id in inv:
175
ie = inv[self.file_id]
176
if ie.revision in candidates:
177
# same revision value in two different inventories:
178
# correct possible inconsistencies:
179
# * there was a bug in revision updates with 'x' bit
182
if candidates[ie.revision].executable != ie.executable:
183
candidates[ie.revision].executable = False
184
ie.executable = False
185
except AttributeError:
188
# add this revision as a candidate.
189
candidates[ie.revision] = ie
193
"""Return true if the object this entry represents has textual data.
195
Note that textual data includes binary content.
197
Also note that all entries get weave files created for them.
198
This attribute is primarily used when upgrading from old trees that
199
did not have the weave index for all inventory entries.
203
def __init__(self, file_id, name, parent_id):
204
"""Create an InventoryEntry
206
The filename must be a single component, relative to the
207
parent directory; it cannot be a whole path or relative name.
209
>>> e = InventoryFile('123', 'hello.c', ROOT_ID)
214
>>> e = InventoryFile('123', 'src/hello.c', ROOT_ID)
215
Traceback (most recent call last):
216
InvalidEntryName: Invalid entry name: src/hello.c
218
if '/' in name or '\\' in name:
219
raise errors.InvalidEntryName(name=name)
220
self.file_id = file_id
223
self.parent_id = parent_id
225
def kind_character(self):
226
"""Return a short kind indicator useful for appending to names."""
227
raise BzrError('unknown kind %r' % self.kind)
229
known_kinds = ('file', 'directory', 'symlink')
231
def sorted_children(self):
232
return sorted(self.children.items())
235
def versionable_kind(kind):
236
return (kind in ('file', 'directory', 'symlink', 'tree-reference'))
238
def check(self, checker, rev_id, inv):
239
"""Check this inventory entry is intact.
241
This is a template method, override _check for kind specific
244
:param checker: Check object providing context for the checks;
245
can be used to find out what parts of the repository have already
247
:param rev_id: Revision id from which this InventoryEntry was loaded.
248
Not necessarily the last-changed revision for this file.
249
:param inv: Inventory from which the entry was loaded.
251
if self.parent_id is not None:
252
if not inv.has_id(self.parent_id):
253
raise BzrCheckError('missing parent {%s} in inventory for revision {%s}'
254
% (self.parent_id, rev_id))
255
checker._add_entry_to_text_key_references(inv, self)
256
self._check(checker, rev_id)
258
def _check(self, checker, rev_id):
259
"""Check this inventory entry for kind specific errors."""
260
checker._report_items.append(
261
'unknown entry kind %r in revision {%s}' % (self.kind, rev_id))
264
"""Clone this inventory entry."""
265
raise NotImplementedError
268
def describe_change(old_entry, new_entry):
269
"""Describe the change between old_entry and this.
This smells of being an InterInventoryEntry situation, but as it's
the first one, we're making it a static method for now.
274
An entry with a different parent, or different name is considered
275
to be renamed. Reparenting is an internal detail.
276
Note that renaming the parent does not trigger a rename for the
279
# TODO: Perhaps return an object rather than just a string
280
if old_entry is new_entry:
281
# also the case of both being None
283
elif old_entry is None:
285
elif new_entry is None:
287
if old_entry.kind != new_entry.kind:
289
text_modified, meta_modified = new_entry.detect_changes(old_entry)
290
if text_modified or meta_modified:
294
# TODO 20060511 (mbp, rbc) factor out 'detect_rename' here.
295
if old_entry.parent_id != new_entry.parent_id:
297
elif old_entry.name != new_entry.name:
301
if renamed and not modified:
302
return InventoryEntry.RENAMED
303
if modified and not renamed:
305
if modified and renamed:
306
return InventoryEntry.MODIFIED_AND_RENAMED
310
return ("%s(%r, %r, parent_id=%r, revision=%r)"
311
% (self.__class__.__name__,
317
def __eq__(self, other):
319
# For the case when objects are cached
321
if not isinstance(other, InventoryEntry):
322
return NotImplemented
324
return ((self.file_id == other.file_id)
325
and (self.name == other.name)
326
and (other.symlink_target == self.symlink_target)
327
and (self.text_sha1 == other.text_sha1)
328
and (self.text_size == other.text_size)
329
and (self.text_id == other.text_id)
330
and (self.parent_id == other.parent_id)
331
and (self.kind == other.kind)
332
and (self.revision == other.revision)
333
and (self.executable == other.executable)
334
and (self.reference_revision == other.reference_revision)
337
def __ne__(self, other):
338
return not (self == other)
341
raise ValueError('not hashable')
343
def _unchanged(self, previous_ie):
344
"""Has this entry changed relative to previous_ie.
346
This method should be overridden in child classes.
349
# different inv parent
350
if previous_ie.parent_id != self.parent_id:
353
elif previous_ie.name != self.name:
355
elif previous_ie.kind != self.kind:
359
def _read_tree_state(self, path, work_tree):
360
"""Populate fields in the inventory entry from the given tree.
362
Note that this should be modified to be a noop on virtual trees
363
as all entries created there are prepopulated.
365
# TODO: Rather than running this manually, we should check the
366
# working sha1 and other expensive properties when they're
367
# first requested, or preload them if they're already known
368
pass # nothing to do by default
370
def _forget_tree_state(self):
374
class InventoryDirectory(InventoryEntry):
375
"""A directory in an inventory."""
377
__slots__ = ['children']
381
def _check(self, checker, rev_id):
382
"""See InventoryEntry._check"""
383
# In non-rich-root repositories we do not expect a file graph for the
385
if self.name == '' and not checker.rich_roots:
387
# Directories are stored as an empty file, but the file should exist
388
# to provide a per-fileid log. The hash of every directory content is
389
# "da..." below (the sha1sum of '').
390
checker.add_pending_item(rev_id,
391
('texts', self.file_id, self.revision), 'text',
392
'da39a3ee5e6b4b0d3255bfef95601890afd80709')
395
other = InventoryDirectory(self.file_id, self.name, self.parent_id)
396
other.revision = self.revision
397
# note that children are *not* copied; they're pulled across when
401
def __init__(self, file_id, name, parent_id):
402
super(InventoryDirectory, self).__init__(file_id, name, parent_id)
405
def kind_character(self):
406
"""See InventoryEntry.kind_character."""
410
class InventoryFile(InventoryEntry):
411
"""A file in an inventory."""
413
__slots__ = ['text_sha1', 'text_size', 'text_id', 'executable']
417
def __init__(self, file_id, name, parent_id):
418
super(InventoryFile, self).__init__(file_id, name, parent_id)
419
self.text_sha1 = None
420
self.text_size = None
422
self.executable = False
424
def _check(self, checker, tree_revision_id):
425
"""See InventoryEntry._check"""
426
# TODO: check size too.
427
checker.add_pending_item(tree_revision_id,
428
('texts', self.file_id, self.revision), 'text',
430
if self.text_size is None:
431
checker._report_items.append(
432
'fileid {%s} in {%s} has None for text_size' % (self.file_id,
436
other = InventoryFile(self.file_id, self.name, self.parent_id)
437
other.executable = self.executable
438
other.text_id = self.text_id
439
other.text_sha1 = self.text_sha1
440
other.text_size = self.text_size
441
other.revision = self.revision
444
def detect_changes(self, old_entry):
445
"""See InventoryEntry.detect_changes."""
446
text_modified = (self.text_sha1 != old_entry.text_sha1)
447
meta_modified = (self.executable != old_entry.executable)
448
return text_modified, meta_modified
450
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
451
output_to, reverse=False):
452
"""See InventoryEntry._diff."""
453
from bzrlib.diff import DiffText
454
from_file_id = self.file_id
456
to_file_id = to_entry.file_id
460
to_file_id, from_file_id = from_file_id, to_file_id
461
tree, to_tree = to_tree, tree
462
from_label, to_label = to_label, from_label
463
differ = DiffText(tree, to_tree, output_to, 'utf-8', '', '',
465
return differ.diff_text(from_file_id, to_file_id, from_label, to_label)
468
"""See InventoryEntry.has_text."""
471
def kind_character(self):
472
"""See InventoryEntry.kind_character."""
475
def _read_tree_state(self, path, work_tree):
476
"""See InventoryEntry._read_tree_state."""
477
self.text_sha1 = work_tree.get_file_sha1(self.file_id, path=path)
478
# FIXME: 20050930 probe for the text size when getting sha1
479
# in _read_tree_state
480
self.executable = work_tree.is_executable(self.file_id, path=path)
483
return ("%s(%r, %r, parent_id=%r, sha1=%r, len=%s, revision=%s)"
484
% (self.__class__.__name__,
492
def _forget_tree_state(self):
493
self.text_sha1 = None
495
def _unchanged(self, previous_ie):
496
"""See InventoryEntry._unchanged."""
497
compatible = super(InventoryFile, self)._unchanged(previous_ie)
498
if self.text_sha1 != previous_ie.text_sha1:
501
# FIXME: 20050930 probe for the text size when getting sha1
502
# in _read_tree_state
503
self.text_size = previous_ie.text_size
504
if self.executable != previous_ie.executable:
509
class InventoryLink(InventoryEntry):
510
"""A file in an inventory."""
512
__slots__ = ['symlink_target']
516
def __init__(self, file_id, name, parent_id):
517
super(InventoryLink, self).__init__(file_id, name, parent_id)
518
self.symlink_target = None
520
def _check(self, checker, tree_revision_id):
521
"""See InventoryEntry._check"""
522
if self.symlink_target is None:
523
checker._report_items.append(
524
'symlink {%s} has no target in revision {%s}'
525
% (self.file_id, tree_revision_id))
526
# Symlinks are stored as ''
527
checker.add_pending_item(tree_revision_id,
528
('texts', self.file_id, self.revision), 'text',
529
'da39a3ee5e6b4b0d3255bfef95601890afd80709')
532
other = InventoryLink(self.file_id, self.name, self.parent_id)
533
other.symlink_target = self.symlink_target
534
other.revision = self.revision
537
def detect_changes(self, old_entry):
538
"""See InventoryEntry.detect_changes."""
539
# FIXME: which _modified field should we use? RBC 20051003
540
text_modified = (self.symlink_target != old_entry.symlink_target)
542
mutter(" symlink target changed")
543
meta_modified = False
544
return text_modified, meta_modified
546
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
547
output_to, reverse=False):
548
"""See InventoryEntry._diff."""
549
from bzrlib.diff import DiffSymlink
550
old_target = self.symlink_target
551
if to_entry is not None:
552
new_target = to_entry.symlink_target
561
new_target, old_target = old_target, new_target
562
differ = DiffSymlink(old_tree, new_tree, output_to)
563
return differ.diff_symlink(old_target, new_target)
565
def kind_character(self):
566
"""See InventoryEntry.kind_character."""
569
def _read_tree_state(self, path, work_tree):
570
"""See InventoryEntry._read_tree_state."""
571
self.symlink_target = work_tree.get_symlink_target(self.file_id)
573
def _forget_tree_state(self):
574
self.symlink_target = None
576
def _unchanged(self, previous_ie):
577
"""See InventoryEntry._unchanged."""
578
compatible = super(InventoryLink, self)._unchanged(previous_ie)
579
if self.symlink_target != previous_ie.symlink_target:
584
class TreeReference(InventoryEntry):
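"""A reference to a nested tree in an inventory; see reference_revision."""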
586
__slots__ = ['reference_revision']
588
kind = 'tree-reference'
590
def __init__(self, file_id, name, parent_id, revision=None,
591
reference_revision=None):
592
InventoryEntry.__init__(self, file_id, name, parent_id)
593
self.revision = revision
594
self.reference_revision = reference_revision
597
return TreeReference(self.file_id, self.name, self.parent_id,
598
self.revision, self.reference_revision)
600
def _read_tree_state(self, path, work_tree):
601
"""Populate fields in the inventory entry from the given tree.
603
self.reference_revision = work_tree.get_reference_revision(
606
def _forget_tree_state(self):
607
self.reference_revision = None
609
def _unchanged(self, previous_ie):
610
"""See InventoryEntry._unchanged."""
611
compatible = super(TreeReference, self)._unchanged(previous_ie)
612
if self.reference_revision != previous_ie.reference_revision:
617
class CommonInventory(object):
618
"""Basic inventory logic, defined in terms of primitives like has_id.
620
An inventory is the metadata about the contents of a tree.
622
This is broadly a map from file_id to entries such as directories, files,
623
symlinks and tree references. Each entry maintains its own metadata like
624
SHA1 and length for files, or children for a directory.
626
Entries can be looked up either by path or by file_id.
628
InventoryEntry objects must not be modified after they are
629
inserted, other than through the Inventory API.
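As a rough usage sketch (file ids below are purely illustrative): for an
inventory inv containing 'src' and 'src/hello.c', inv.path2id('src/hello.c')
returns the file id of hello.c, and inv.id2path() with that id returns
'src/hello.c' again.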
632
def __contains__(self, file_id):
633
"""True if this entry contains a file with given id.
635
>>> inv = Inventory()
636
>>> inv.add(InventoryFile('123', 'foo.c', ROOT_ID))
637
InventoryFile('123', 'foo.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
643
Note that this method along with __iter__ are not encouraged for use as
644
they are less clear than specific query methods - they may be removed
647
return self.has_id(file_id)
649
def has_filename(self, filename):
650
return bool(self.path2id(filename))
652
def id2path(self, file_id):
653
"""Return as a string the path to file_id.
656
>>> e = i.add(InventoryDirectory('src-id', 'src', ROOT_ID))
657
>>> e = i.add(InventoryFile('foo-id', 'foo.c', parent_id='src-id'))
658
>>> print i.id2path('foo-id')
661
:raises NoSuchId: If file_id is not present in the inventory.
663
# get all names, skipping root
664
return '/'.join(reversed(
665
[parent.name for parent in
666
self._iter_file_id_parents(file_id)][:-1]))
668
def iter_entries(self, from_dir=None, recursive=True):
669
"""Return (path, entry) pairs, in order by name.
671
:param from_dir: if None, start from the root,
672
otherwise start from this directory (either file-id or entry)
673
:param recursive: recurse into directories or not
676
if self.root is None:
680
elif isinstance(from_dir, basestring):
681
from_dir = self[from_dir]
683
# unrolling the recursive calls changed the time from
684
# 440ms/663ms (inline/total) to 116ms/116ms
685
children = from_dir.children.items()
688
for name, ie in children:
691
children = collections.deque(children)
692
stack = [(u'', children)]
694
from_dir_relpath, children = stack[-1]
697
name, ie = children.popleft()
699
# we know that from_dir_relpath never ends in a slash
700
# and 'f' doesn't begin with one, we can do a string op, rather
701
# than the checks of pathjoin(), though this means that all paths
703
path = from_dir_relpath + '/' + name
707
if ie.kind != 'directory':
710
# But do this child first
711
new_children = ie.children.items()
713
new_children = collections.deque(new_children)
714
stack.append((path, new_children))
715
# Break out of inner loop, so that we start outer loop with child
718
# if we finished all children, pop it off the stack
721
def _preload_cache(self):
722
"""Populate any caches, we are about to access all items.
724
The default implementation does nothing, because CommonInventory doesn't
729
def iter_entries_by_dir(self, from_dir=None, specific_file_ids=None,
730
yield_parents=False):
731
"""Iterate over the entries in a directory first order.
733
This returns all entries for a directory before returning
734
the entries for children of a directory. This is not
735
lexicographically sorted order, and is a hybrid between
736
depth-first and breadth-first.
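For example, every entry directly inside a directory is yielded before
descending into any of its subdirectories, so for a tree containing 'a/',
'a/b' and 'c', both 'a' and 'c' appear before 'a/b' (an illustrative
sketch of the ordering, not a doctest).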
738
:param yield_parents: If True, yield the parents from the root leading
739
down to specific_file_ids that have been requested. This has no
740
impact if specific_file_ids is None.
741
:return: This yields (path, entry) pairs
743
if specific_file_ids and not isinstance(specific_file_ids, set):
744
specific_file_ids = set(specific_file_ids)
745
# TODO? Perhaps this should return the from_dir so that the root is
746
# yielded? or maybe an option?
747
if from_dir is None and specific_file_ids is None:
748
# They are iterating from the root, and have not specified any
749
# specific entries to look at. All current callers fully consume the
750
# iterator, so we can safely assume we are accessing all entries
751
self._preload_cache()
753
if self.root is None:
755
# Optimize a common case
756
if (not yield_parents and specific_file_ids is not None and
757
len(specific_file_ids) == 1):
758
file_id = list(specific_file_ids)[0]
760
yield self.id2path(file_id), self[file_id]
763
if (specific_file_ids is None or yield_parents or
764
self.root.file_id in specific_file_ids):
766
elif isinstance(from_dir, basestring):
767
from_dir = self[from_dir]
769
if specific_file_ids is not None:
770
# TODO: jam 20070302 This could really be done as a loop rather
771
# than a bunch of recursive calls.
774
def add_ancestors(file_id):
775
if file_id not in byid:
777
parent_id = byid[file_id].parent_id
778
if parent_id is None:
780
if parent_id not in parents:
781
parents.add(parent_id)
782
add_ancestors(parent_id)
783
for file_id in specific_file_ids:
784
add_ancestors(file_id)
788
stack = [(u'', from_dir)]
790
cur_relpath, cur_dir = stack.pop()
793
for child_name, child_ie in sorted(cur_dir.children.iteritems()):
795
child_relpath = cur_relpath + child_name
797
if (specific_file_ids is None or
798
child_ie.file_id in specific_file_ids or
799
(yield_parents and child_ie.file_id in parents)):
800
yield child_relpath, child_ie
802
if child_ie.kind == 'directory':
803
if parents is None or child_ie.file_id in parents:
804
child_dirs.append((child_relpath+'/', child_ie))
805
stack.extend(reversed(child_dirs))
807
def _make_delta(self, old):
808
"""Make an inventory delta from two inventories."""
811
adds = new_ids - old_ids
812
deletes = old_ids - new_ids
813
common = old_ids.intersection(new_ids)
815
for file_id in deletes:
816
delta.append((old.id2path(file_id), None, file_id, None))
818
delta.append((None, self.id2path(file_id), file_id, self[file_id]))
819
for file_id in common:
820
if old[file_id] != self[file_id]:
821
delta.append((old.id2path(file_id), self.id2path(file_id),
822
file_id, self[file_id]))
825
def _get_mutable_inventory(self):
826
"""Returns a mutable copy of the object.
Some inventories are immutable, yet working trees, for example, need
to mutate existing inventories instead of creating a new one.
831
raise NotImplementedError(self._get_mutable_inventory)
833
def make_entry(self, kind, name, parent_id, file_id=None):
834
"""Simple thunk to bzrlib.inventory.make_entry."""
835
return make_entry(kind, name, parent_id, file_id)
838
"""Return list of (path, ie) for all entries except the root.
840
This may be faster than iter_entries.
843
def descend(dir_ie, dir_path):
844
kids = dir_ie.children.items()
846
for name, ie in kids:
847
child_path = osutils.pathjoin(dir_path, name)
848
accum.append((child_path, ie))
849
if ie.kind == 'directory':
850
descend(ie, child_path)
852
if self.root is not None:
853
descend(self.root, u'')
856
def directories(self):
857
"""Return (path, entry) pairs for all directories, including the root.
860
def descend(parent_ie, parent_path):
861
accum.append((parent_path, parent_ie))
863
kids = [(ie.name, ie) for ie in parent_ie.children.itervalues() if ie.kind == 'directory']
866
for name, child_ie in kids:
867
child_path = osutils.pathjoin(parent_path, name)
868
descend(child_ie, child_path)
869
descend(self.root, u'')
872
def path2id(self, relpath):
873
"""Walk down through directories to return entry of last component.
875
:param relpath: may be either a list of path components, or a single
876
string, in which case it is automatically split.
878
This returns the entry of the last component in the path,
879
which may be either a file or a directory.
881
Returns None IFF the path is not found.
883
if isinstance(relpath, basestring):
884
names = osutils.splitpath(relpath)
890
except errors.NoSuchId:
891
# root doesn't exist yet so nothing else can
897
children = getattr(parent, 'children', None)
906
return parent.file_id
908
def filter(self, specific_fileids):
909
"""Get an inventory view filtered against a set of file-ids.
911
Children of directories and parents are included.
913
The result may or may not reference the underlying inventory
914
so it should be treated as immutable.
916
interesting_parents = set()
917
for fileid in specific_fileids:
919
interesting_parents.update(self.get_idpath(fileid))
920
except errors.NoSuchId:
921
# This fileid is not in the inventory - that's ok
923
entries = self.iter_entries()
924
if self.root is None:
925
return Inventory(root_id=None)
926
other = Inventory(entries.next()[1].file_id)
927
other.root.revision = self.root.revision
928
other.revision_id = self.revision_id
929
directories_to_expand = set()
930
for path, entry in entries:
931
file_id = entry.file_id
932
if (file_id in specific_fileids
933
or entry.parent_id in directories_to_expand):
934
if entry.kind == 'directory':
935
directories_to_expand.add(file_id)
936
elif file_id not in interesting_parents:
938
other.add(entry.copy())
941
def get_idpath(self, file_id):
942
"""Return a list of file_ids for the path to an entry.
944
The list contains one element for each directory followed by
945
the id of the file itself. So the length of the returned list
946
is equal to the depth of the file in the tree, counting the
947
root directory as depth 1.
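For example (with illustrative file ids), a file versioned at src/hello.c
would give [ROOT_ID, 'src-id', 'hello-id'].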
950
for parent in self._iter_file_id_parents(file_id):
951
p.insert(0, parent.file_id)
955
class Inventory(CommonInventory):
956
"""Mutable dict based in-memory inventory.
958
We never store the full path to a file, because renaming a directory
959
implicitly moves all of its contents. This class internally maintains a
960
lookup tree that allows the children under a directory to be
963
>>> inv = Inventory()
964
>>> inv.add(InventoryFile('123-123', 'hello.c', ROOT_ID))
965
InventoryFile('123-123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
966
>>> inv['123-123'].name
969
Id's may be looked up from paths:
971
>>> inv.path2id('hello.c')
976
There are iterators over the contents:
978
>>> [entry[0] for entry in inv.iter_entries()]
982
def __init__(self, root_id=ROOT_ID, revision_id=None):
983
"""Create or read an inventory.
985
If a working directory is specified, the inventory is read
986
from there. If the file is specified, read from that. If not,
987
the inventory is created empty.
989
The inventory is created with a default root directory, with
992
if root_id is not None:
993
self._set_root(InventoryDirectory(root_id, u'', None))
997
self.revision_id = revision_id
1000
# More than one page of output is not useful for debugging
1003
contents = repr(self._byid)
1004
if len(contents) > max_len:
1005
contents = contents[:(max_len-len(closing))] + closing
1006
return "<Inventory object at %x, contents=%r>" % (id(self), contents)
1008
def apply_delta(self, delta):
1009
"""Apply a delta to this inventory.
1011
See the inventory developers documentation for the theory behind
1014
If delta application fails the inventory is left in an indeterminate
1015
state and must not be used.
1017
:param delta: A list of changes to apply. After all the changes are
1018
applied the final inventory must be internally consistent, but it
1019
is ok to supply changes which, if only half-applied would have an
1020
invalid result - such as supplying two changes which rename two
1021
files, 'A' and 'B' with each other : [('A', 'B', 'A-id', a_entry),
1022
('B', 'A', 'B-id', b_entry)].
1024
Each change is a tuple, of the form (old_path, new_path, file_id,
1027
When new_path is None, the change indicates the removal of an entry
1028
from the inventory and new_entry will be ignored (using None is
1029
appropriate). If new_path is not None, then new_entry must be an
1030
InventoryEntry instance, which will be incorporated into the
1031
inventory (and replace any existing entry with the same file id).
1033
When old_path is None, the change indicates the addition of
1034
a new entry to the inventory.
1036
When neither new_path nor old_path are None, the change is a
1037
modification to an entry, such as a rename, reparent, kind change
1040
The children attribute of new_entry is ignored. This is because
1041
this method preserves children automatically across alterations to
1042
the parent of the children, and cases where the parent id of a
1043
child is changing require the child to be passed in as a separate
1044
change regardless. E.g. in the recursive deletion of a directory -
1045
the directory's children must be included in the delta, or the
1046
final inventory will be invalid.
1048
Note that a file_id must only appear once within a given delta.
1049
An AssertionError is raised otherwise.
1051
# Check that the delta is legal. It would be nice if this could be
1052
# done within the loops below but it's safer to validate the delta
1053
# before starting to mutate the inventory, as there isn't a rollback
1055
list(_check_delta_unique_ids(_check_delta_unique_new_paths(
1056
_check_delta_unique_old_paths(_check_delta_ids_match_entry(
1057
_check_delta_ids_are_valid(
1058
_check_delta_new_path_entry_both_or_None(
1062
# Remove all affected items which were in the original inventory,
1063
# starting with the longest paths, thus ensuring parents are examined
1064
# after their children, which means that everything we examine has no
1065
# modified children remaining by the time we examine it.
1066
for old_path, file_id in sorted(((op, f) for op, np, f, e in delta
1067
if op is not None), reverse=True):
1068
# Preserve unaltered children of file_id for later reinsertion.
1069
file_id_children = getattr(self[file_id], 'children', {})
1070
if len(file_id_children):
1071
children[file_id] = file_id_children
1072
if self.id2path(file_id) != old_path:
1073
raise errors.InconsistentDelta(old_path, file_id,
1074
"Entry was at wrong other path %r." % self.id2path(file_id))
1075
# Remove file_id and the unaltered children. If file_id is not
1076
# being deleted it will be reinserted back later.
1077
self.remove_recursive_id(file_id)
1078
# Insert all affected which should be in the new inventory, reattaching
1079
# their children if they had any. This is done from shortest path to
1080
# longest, ensuring that items which were modified and whose parents in
1081
# the resulting inventory were also modified, are inserted after their
1083
for new_path, f, new_entry in sorted((np, f, e) for op, np, f, e in
1084
delta if np is not None):
1085
if new_entry.kind == 'directory':
1086
# Pop the children to allow detection of children whose
1087
# parents were deleted and which were not reattached to a new
1089
replacement = InventoryDirectory(new_entry.file_id,
1090
new_entry.name, new_entry.parent_id)
1091
replacement.revision = new_entry.revision
1092
replacement.children = children.pop(replacement.file_id, {})
1093
new_entry = replacement
1096
except errors.DuplicateFileId:
1097
raise errors.InconsistentDelta(new_path, new_entry.file_id,
1098
"New id is already present in target.")
1099
except AttributeError:
1100
raise errors.InconsistentDelta(new_path, new_entry.file_id,
1101
"Parent is not a directory.")
1102
if self.id2path(new_entry.file_id) != new_path:
1103
raise errors.InconsistentDelta(new_path, new_entry.file_id,
1104
"New path is not consistent with parent path.")
1106
# Get the parent id that was deleted
1107
parent_id, children = children.popitem()
1108
raise errors.InconsistentDelta("<deleted>", parent_id,
1109
"The file id was deleted but its children were not deleted.")
1111
def create_by_apply_delta(self, inventory_delta, new_revision_id,
1112
propagate_caches=False):
1113
"""See CHKInventory.create_by_apply_delta()"""
1114
new_inv = self.copy()
1115
new_inv.apply_delta(inventory_delta)
1116
new_inv.revision_id = new_revision_id
1119
def _set_root(self, ie):
1121
self._byid = {self.root.file_id: self.root}
1124
# TODO: jam 20051218 Should copy also copy the revision_id?
1125
entries = self.iter_entries()
1126
if self.root is None:
1127
return Inventory(root_id=None)
1128
other = Inventory(entries.next()[1].file_id)
1129
other.root.revision = self.root.revision
1130
# copy recursively so we know directories will be added before
1131
# their children. There are more efficient ways than this...
1132
for path, entry in entries:
1133
other.add(entry.copy())
1136
def _get_mutable_inventory(self):
1137
"""See CommonInventory._get_mutable_inventory."""
1138
return copy.deepcopy(self)
1141
"""Iterate over all file-ids."""
1142
return iter(self._byid)
1144
def iter_just_entries(self):
1145
"""Iterate over all entries.
1147
Unlike iter_entries(), just the entries are returned (not (path, ie))
1148
and the order of entries is undefined.
1150
XXX: We may not want to merge this into bzr.dev.
1152
if self.root is None:
1154
for _, ie in self._byid.iteritems():
1158
"""Returns number of entries."""
1159
return len(self._byid)
1161
def __getitem__(self, file_id):
1162
"""Return the entry for given file_id.
1164
>>> inv = Inventory()
1165
>>> inv.add(InventoryFile('123123', 'hello.c', ROOT_ID))
1166
InventoryFile('123123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
1167
>>> inv['123123'].name
1171
return self._byid[file_id]
1173
# really we're passing an inventory, not a tree...
1174
raise errors.NoSuchId(self, file_id)
1176
def get_file_kind(self, file_id):
1177
return self._byid[file_id].kind
1179
def get_child(self, parent_id, filename):
1180
return self[parent_id].children.get(filename)
1182
def _add_child(self, entry):
1183
"""Add an entry to the inventory, without adding it to its parent"""
1184
if entry.file_id in self._byid:
1185
raise BzrError("inventory already contains entry with id {%s}" %
1187
self._byid[entry.file_id] = entry
1188
for child in getattr(entry, 'children', {}).itervalues():
1189
self._add_child(child)
1192
def add(self, entry):
1193
"""Add entry to inventory.
1197
if entry.file_id in self._byid:
1198
raise errors.DuplicateFileId(entry.file_id,
1199
self._byid[entry.file_id])
1200
if entry.parent_id is None:
1204
parent = self._byid[entry.parent_id]
1206
raise errors.InconsistentDelta("<unknown>", entry.parent_id,
1207
"Parent not in inventory.")
1208
if entry.name in parent.children:
1209
raise errors.InconsistentDelta(
1210
self.id2path(parent.children[entry.name].file_id),
1212
"Path already versioned")
1213
parent.children[entry.name] = entry
1214
return self._add_child(entry)
1216
def add_path(self, relpath, kind, file_id=None, parent_id=None):
1217
"""Add entry from a path.
1219
The immediate parent must already be versioned.
1221
Returns the new entry object."""
1223
parts = osutils.splitpath(relpath)
1227
file_id = generate_ids.gen_root_id()
1228
self.root = InventoryDirectory(file_id, '', None)
1229
self._byid = {self.root.file_id: self.root}
1232
parent_path = parts[:-1]
1233
parent_id = self.path2id(parent_path)
1234
if parent_id is None:
1235
raise errors.NotVersionedError(path=parent_path)
1236
ie = make_entry(kind, parts[-1], parent_id, file_id)
1239
def __delitem__(self, file_id):
1240
"""Remove entry by id.
1242
>>> inv = Inventory()
1243
>>> inv.add(InventoryFile('123', 'foo.c', ROOT_ID))
1244
InventoryFile('123', 'foo.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
1252
del self._byid[file_id]
1253
if ie.parent_id is not None:
1254
del self[ie.parent_id].children[ie.name]
1256
def __eq__(self, other):
1257
"""Compare two sets by comparing their contents.
1259
>>> i1 = Inventory()
1260
>>> i2 = Inventory()
1263
>>> i1.add(InventoryFile('123', 'foo', ROOT_ID))
1264
InventoryFile('123', 'foo', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
1267
>>> i2.add(InventoryFile('123', 'foo', ROOT_ID))
1268
InventoryFile('123', 'foo', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
1272
if not isinstance(other, Inventory):
1273
return NotImplemented
1275
return self._byid == other._byid
1277
def __ne__(self, other):
1278
return not self.__eq__(other)
1281
raise ValueError('not hashable')
1283
def _iter_file_id_parents(self, file_id):
1284
"""Yield the parents of file_id up to the root."""
1285
while file_id is not None:
1287
ie = self._byid[file_id]
1289
raise errors.NoSuchId(tree=None, file_id=file_id)
1291
file_id = ie.parent_id
1293
def has_id(self, file_id):
1294
return (file_id in self._byid)
1296
def _make_delta(self, old):
1297
"""Make an inventory delta from two inventories."""
1298
old_getter = getattr(old, '_byid', old)
1299
new_getter = self._byid
1300
old_ids = set(old_getter)
1301
new_ids = set(new_getter)
1302
adds = new_ids - old_ids
1303
deletes = old_ids - new_ids
1304
if not adds and not deletes:
1307
common = old_ids.intersection(new_ids)
1309
for file_id in deletes:
1310
delta.append((old.id2path(file_id), None, file_id, None))
1311
for file_id in adds:
1312
delta.append((None, self.id2path(file_id), file_id, self[file_id]))
1313
for file_id in common:
1314
new_ie = new_getter[file_id]
1315
old_ie = old_getter[file_id]
1316
# If xml_serializer returns the cached InventoryEntries (rather
1317
# than always doing .copy()), inlining the 'is' check saves 2.7M
1318
# calls to __eq__. Under lsprof this saves 20s => 6s.
1319
# It is a minor improvement without lsprof.
1320
if old_ie is new_ie or old_ie == new_ie:
1323
delta.append((old.id2path(file_id), self.id2path(file_id),
1327
def remove_recursive_id(self, file_id):
1328
"""Remove file_id, and children, from the inventory.
1330
:param file_id: A file_id to remove.
1332
to_find_delete = [self._byid[file_id]]
1334
while to_find_delete:
1335
ie = to_find_delete.pop()
1336
to_delete.append(ie.file_id)
1337
if ie.kind == 'directory':
1338
to_find_delete.extend(ie.children.values())
1339
for file_id in reversed(to_delete):
1341
del self._byid[file_id]
1342
if ie.parent_id is not None:
1343
del self[ie.parent_id].children[ie.name]
1347
def rename(self, file_id, new_parent_id, new_name):
1348
"""Move a file within the inventory.
1350
This can change either the name, or the parent, or both.
1352
This does not move the working file.
1354
new_name = ensure_normalized_name(new_name)
1355
if not is_valid_name(new_name):
1356
raise BzrError("not an acceptable filename: %r" % new_name)
1358
new_parent = self._byid[new_parent_id]
1359
if new_name in new_parent.children:
1360
raise BzrError("%r already exists in %r" % (new_name, self.id2path(new_parent_id)))
1362
new_parent_idpath = self.get_idpath(new_parent_id)
1363
if file_id in new_parent_idpath:
1364
raise BzrError("cannot move directory %r into a subdirectory of itself, %r"
1365
% (self.id2path(file_id), self.id2path(new_parent_id)))
1367
file_ie = self._byid[file_id]
1368
old_parent = self._byid[file_ie.parent_id]
1370
# TODO: Don't leave things messed up if this fails
1372
del old_parent.children[file_ie.name]
1373
new_parent.children[new_name] = file_ie
1375
file_ie.name = new_name
1376
file_ie.parent_id = new_parent_id
1378
def is_root(self, file_id):
1379
return self.root is not None and file_id == self.root.file_id
1382
class CHKInventory(CommonInventory):
1383
"""An inventory persisted in a CHK store.
1385
By design, a CHKInventory is immutable so many of the methods
1386
supported by Inventory - add, rename, apply_delta, etc - are *not*
1387
supported. To create a new CHKInventory, use create_by_apply_delta()
1388
or from_inventory(), say.
1390
Internally, a CHKInventory has one or two CHKMaps:
1392
* id_to_entry - a map from (file_id,) => InventoryEntry as bytes
1393
* parent_id_basename_to_file_id - a map from (parent_id, basename_utf8)
1396
The second map is optional and not present in early CHK repositories.
1398
No caching is performed: every method call or item access will perform
1399
requests to the storage layer. As such, keep references to objects you
1403
def __init__(self, search_key_name):
1404
CommonInventory.__init__(self)
1405
self._fileid_to_entry_cache = {}
1406
self._fully_cached = False
1407
self._path_to_fileid_cache = {}
1408
self._search_key_name = search_key_name
1411
def __eq__(self, other):
1412
"""Compare two sets by comparing their contents."""
1413
if not isinstance(other, CHKInventory):
1414
return NotImplemented
1416
this_key = self.id_to_entry.key()
1417
other_key = other.id_to_entry.key()
1418
this_pid_key = self.parent_id_basename_to_file_id.key()
1419
other_pid_key = other.parent_id_basename_to_file_id.key()
1420
if None in (this_key, this_pid_key, other_key, other_pid_key):
1422
return this_key == other_key and this_pid_key == other_pid_key
1424
def _entry_to_bytes(self, entry):
1425
"""Serialise entry as a single bytestring.
1427
:param Entry: An inventory entry.
1428
:return: A bytestring for the entry.
1431
ENTRY ::= FILE | DIR | SYMLINK | TREE
1432
FILE ::= "file: " COMMON SEP SHA SEP SIZE SEP EXECUTABLE
1433
DIR ::= "dir: " COMMON
1434
SYMLINK ::= "symlink: " COMMON SEP TARGET_UTF8
1435
TREE ::= "tree: " COMMON REFERENCE_REVISION
1436
COMMON ::= FILE_ID SEP PARENT_ID SEP NAME_UTF8 SEP REVISION
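For example (a sketch; SEP is a newline in the encoding below), a file
entry serialises roughly as:
file: <file_id> SEP <parent_id> SEP <name utf8> SEP <revision>
SEP <text_sha1> SEP <text_size> SEP <Y if executable>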
1439
if entry.parent_id is not None:
1440
parent_str = entry.parent_id
1443
name_str = entry.name.encode("utf8")
1444
if entry.kind == 'file':
1445
if entry.executable:
1449
return "file: %s\n%s\n%s\n%s\n%s\n%d\n%s" % (
1450
entry.file_id, parent_str, name_str, entry.revision,
1451
entry.text_sha1, entry.text_size, exec_str)
1452
elif entry.kind == 'directory':
1453
return "dir: %s\n%s\n%s\n%s" % (
1454
entry.file_id, parent_str, name_str, entry.revision)
1455
elif entry.kind == 'symlink':
1456
return "symlink: %s\n%s\n%s\n%s\n%s" % (
1457
entry.file_id, parent_str, name_str, entry.revision,
1458
entry.symlink_target.encode("utf8"))
1459
elif entry.kind == 'tree-reference':
1460
return "tree: %s\n%s\n%s\n%s\n%s" % (
1461
entry.file_id, parent_str, name_str, entry.revision,
1462
entry.reference_revision)
1464
raise ValueError("unknown kind %r" % entry.kind)
1466
def _expand_fileids_to_parents_and_children(self, file_ids):
1467
"""Give a more wholistic view starting with the given file_ids.
1469
For any file_id which maps to a directory, we will include all children
1470
of that directory. We will also include all directories which are
1471
parents of the given file_ids, but we will not include their children.
1478
fringle # fringle-id
1482
if given [foo-id] we will include
1483
TREE_ROOT as interesting parents
1485
foo-id, baz-id, frob-id, fringle-id
1489
# TODO: Pre-pass over the list of fileids to see if anything is already
1490
# deserialized in self._fileid_to_entry_cache
1492
directories_to_expand = set()
1493
children_of_parent_id = {}
1494
# It is okay if some of the fileids are missing
1495
for entry in self._getitems(file_ids):
1496
if entry.kind == 'directory':
1497
directories_to_expand.add(entry.file_id)
1498
interesting.add(entry.parent_id)
1499
children_of_parent_id.setdefault(entry.parent_id, []
1500
).append(entry.file_id)
1502
# Now, interesting has all of the direct parents, but not the
1503
# parents of those parents. It also may have some duplicates with
1505
remaining_parents = interesting.difference(file_ids)
1506
# When we hit the TREE_ROOT, we'll get an interesting parent of None,
1507
# but we don't actually want to recurse into that
1508
interesting.add(None) # this will auto-filter it in the loop
1509
remaining_parents.discard(None)
1510
while remaining_parents:
1511
next_parents = set()
1512
for entry in self._getitems(remaining_parents):
1513
next_parents.add(entry.parent_id)
1514
children_of_parent_id.setdefault(entry.parent_id, []
1515
).append(entry.file_id)
1516
# Remove any search tips we've already processed
1517
remaining_parents = next_parents.difference(interesting)
1518
interesting.update(remaining_parents)
1519
# We should probably also .difference(directories_to_expand)
1520
interesting.update(file_ids)
1521
interesting.discard(None)
1522
while directories_to_expand:
1523
# Expand directories by looking in the
1524
# parent_id_basename_to_file_id map
1525
keys = [StaticTuple(f,).intern() for f in directories_to_expand]
1526
directories_to_expand = set()
1527
items = self.parent_id_basename_to_file_id.iteritems(keys)
1528
next_file_ids = set([item[1] for item in items])
1529
next_file_ids = next_file_ids.difference(interesting)
1530
interesting.update(next_file_ids)
1531
for entry in self._getitems(next_file_ids):
1532
if entry.kind == 'directory':
1533
directories_to_expand.add(entry.file_id)
1534
children_of_parent_id.setdefault(entry.parent_id, []
1535
).append(entry.file_id)
1536
return interesting, children_of_parent_id
1538
def filter(self, specific_fileids):
1539
"""Get an inventory view filtered against a set of file-ids.
1541
Children of directories and parents are included.
1543
The result may or may not reference the underlying inventory
1544
so it should be treated as immutable.
1547
parent_to_children) = self._expand_fileids_to_parents_and_children(
1549
# There is some overlap here, but we assume that all interesting items
1550
# are in the _fileid_to_entry_cache because we had to read them to
1551
# determine if they were a dir we wanted to recurse, or just a file
1552
# This should give us all the entries we'll want to add, so start
1554
other = Inventory(self.root_id)
1555
other.root.revision = self.root.revision
1556
other.revision_id = self.revision_id
1557
if not interesting or not parent_to_children:
1558
# empty filter, or filtering entries that don't exist
1559
# (if even 1 existed, then we would have populated
1560
# parent_to_children with at least the tree root.)
1562
cache = self._fileid_to_entry_cache
1563
remaining_children = collections.deque(parent_to_children[self.root_id])
1564
while remaining_children:
1565
file_id = remaining_children.popleft()
1567
if ie.kind == 'directory':
1568
ie = ie.copy() # We create a copy to depopulate the .children attribute
# TODO: depending on the uses of 'other' we should probably always
# '.copy()' to prevent someone from mutating other and
# invalidating our internal cache
1573
if file_id in parent_to_children:
1574
remaining_children.extend(parent_to_children[file_id])
1578
def _bytes_to_utf8name_key(bytes):
1579
"""Get the file_id, revision_id key out of bytes."""
1580
# We don't normally care about name, except for times when we want
1581
# to filter out empty names because of non rich-root...
1582
sections = bytes.split('\n')
1583
kind, file_id = sections[0].split(': ')
1584
return (sections[2], intern(file_id), intern(sections[3]))
1586
def _bytes_to_entry(self, bytes):
1587
"""Deserialise a serialised entry."""
1588
sections = bytes.split('\n')
1589
if sections[0].startswith("file: "):
1590
result = InventoryFile(sections[0][6:],
1591
sections[2].decode('utf8'),
1593
result.text_sha1 = sections[4]
1594
result.text_size = int(sections[5])
1595
result.executable = sections[6] == "Y"
1596
elif sections[0].startswith("dir: "):
1597
result = CHKInventoryDirectory(sections[0][5:],
1598
sections[2].decode('utf8'),
1600
elif sections[0].startswith("symlink: "):
1601
result = InventoryLink(sections[0][9:],
1602
sections[2].decode('utf8'),
1604
result.symlink_target = sections[4].decode('utf8')
1605
elif sections[0].startswith("tree: "):
1606
result = TreeReference(sections[0][6:],
1607
sections[2].decode('utf8'),
1609
result.reference_revision = sections[4]
1611
raise ValueError("Not a serialised entry %r" % bytes)
1612
result.file_id = intern(result.file_id)
1613
result.revision = intern(sections[3])
1614
if result.parent_id == '':
1615
result.parent_id = None
1616
self._fileid_to_entry_cache[result.file_id] = result
1619
def _get_mutable_inventory(self):
1620
"""See CommonInventory._get_mutable_inventory."""
1621
entries = self.iter_entries()
1622
inv = Inventory(None, self.revision_id)
1623
for path, inv_entry in entries:
1624
inv.add(inv_entry.copy())
1627
def create_by_apply_delta(self, inventory_delta, new_revision_id,
1628
propagate_caches=False):
1629
"""Create a new CHKInventory by applying inventory_delta to this one.
1631
See the inventory developers documentation for the theory behind
1634
:param inventory_delta: The inventory delta to apply. See
1635
Inventory.apply_delta for details.
1636
:param new_revision_id: The revision id of the resulting CHKInventory.
1637
:param propagate_caches: If True, the caches for this inventory are
1638
copied to and updated for the result.
1639
:return: The new CHKInventory.
1641
split = osutils.split
1642
result = CHKInventory(self._search_key_name)
1643
if propagate_caches:
1644
# Just propagate the path-to-fileid cache for now
1645
result._path_to_fileid_cache = dict(self._path_to_fileid_cache.iteritems())
1646
search_key_func = chk_map.search_key_registry.get(self._search_key_name)
1647
self.id_to_entry._ensure_root()
1648
maximum_size = self.id_to_entry._root_node.maximum_size
1649
result.revision_id = new_revision_id
1650
result.id_to_entry = chk_map.CHKMap(
1651
self.id_to_entry._store,
1652
self.id_to_entry.key(),
1653
search_key_func=search_key_func)
1654
result.id_to_entry._ensure_root()
1655
result.id_to_entry._root_node.set_maximum_size(maximum_size)
1656
# Change to apply to the parent_id_basename delta. The dict maps
1657
# (parent_id, basename) -> (old_key, new_value). We use a dict because
1658
# when a path has its id replaced (e.g. the root is changed, or someone
1659
# does bzr mv a b, bzr mv c a), we should output a single change to this
1660
# map rather than two.
1661
parent_id_basename_delta = {}
1662
if self.parent_id_basename_to_file_id is not None:
1663
result.parent_id_basename_to_file_id = chk_map.CHKMap(
1664
self.parent_id_basename_to_file_id._store,
1665
self.parent_id_basename_to_file_id.key(),
1666
search_key_func=search_key_func)
1667
result.parent_id_basename_to_file_id._ensure_root()
1668
self.parent_id_basename_to_file_id._ensure_root()
1669
result_p_id_root = result.parent_id_basename_to_file_id._root_node
1670
p_id_root = self.parent_id_basename_to_file_id._root_node
1671
result_p_id_root.set_maximum_size(p_id_root.maximum_size)
1672
result_p_id_root._key_width = p_id_root._key_width
1674
result.parent_id_basename_to_file_id = None
1675
result.root_id = self.root_id
1676
id_to_entry_delta = []
1677
# inventory_delta is only traversed once, so we just update the
1679
# Check for repeated file ids
1680
inventory_delta = _check_delta_unique_ids(inventory_delta)
1681
# Repeated old paths
1682
inventory_delta = _check_delta_unique_old_paths(inventory_delta)
1683
# Check for repeated new paths
1684
inventory_delta = _check_delta_unique_new_paths(inventory_delta)
1685
# Check for entries that don't match the fileid
1686
inventory_delta = _check_delta_ids_match_entry(inventory_delta)
1687
# Check for nonsense fileids
1688
inventory_delta = _check_delta_ids_are_valid(inventory_delta)
1689
# Check for new_path <-> entry consistency
1690
inventory_delta = _check_delta_new_path_entry_both_or_None(
1692
# All changed entries need to have their parents be directories and be
1693
# at the right path. This set contains (path, id) tuples.
1695
# When we delete an item, all the children of it must be either deleted
1696
# or altered in their own right. As we batch process the change via
1697
# CHKMap.apply_delta, we build a set of things to use to validate the
1701
for old_path, new_path, file_id, entry in inventory_delta:
1704
result.root_id = file_id
1705
if new_path is None:
1710
if propagate_caches:
1712
del result._path_to_fileid_cache[old_path]
1715
deletes.add(file_id)
1717
new_key = StaticTuple(file_id,)
1718
new_value = result._entry_to_bytes(entry)
1719
# Update caches. It's worth doing this whether
1720
# we're propagating the old caches or not.
1721
result._path_to_fileid_cache[new_path] = file_id
1722
parents.add((split(new_path)[0], entry.parent_id))
1723
if old_path is None:
1726
old_key = StaticTuple(file_id,)
1727
if self.id2path(file_id) != old_path:
1728
raise errors.InconsistentDelta(old_path, file_id,
1729
"Entry was at wrong other path %r." %
1730
self.id2path(file_id))
1731
altered.add(file_id)
1732
id_to_entry_delta.append(StaticTuple(old_key, new_key, new_value))
1733
if result.parent_id_basename_to_file_id is not None:
1734
# parent_id, basename changes
1735
if old_path is None:
1738
old_entry = self[file_id]
1739
old_key = self._parent_id_basename_key(old_entry)
1740
if new_path is None:
1744
new_key = self._parent_id_basename_key(entry)
1746
# If the two keys are the same, the value will be unchanged
1747
# as it's always the file id for this entry.
1748
if old_key != new_key:
1749
# Transform a change into explicit delete/add preserving
1750
# a possible match on the key from a different file id.
1751
if old_key is not None:
1752
parent_id_basename_delta.setdefault(
1753
old_key, [None, None])[0] = old_key
1754
if new_key is not None:
1755
parent_id_basename_delta.setdefault(
1756
new_key, [None, None])[1] = new_value
1757
# validate that deletes are complete.
1758
for file_id in deletes:
1759
entry = self[file_id]
1760
if entry.kind != 'directory':
1762
# This loop could potentially be better by using the id_basename
1763
# map to just get the child file ids.
1764
for child in entry.children.values():
1765
if child.file_id not in altered:
1766
raise errors.InconsistentDelta(self.id2path(child.file_id),
1767
child.file_id, "Child not deleted or reparented when "
1769
result.id_to_entry.apply_delta(id_to_entry_delta)
1770
if parent_id_basename_delta:
1771
# Transform the parent_id_basename delta data into a linear delta
1772
# with only one record for a given key. Optimally this would allow
1773
# re-keying, but its simpler to just output that as a delete+add
1774
# to spend less time calculating the delta.
1776
for key, (old_key, value) in parent_id_basename_delta.iteritems():
1777
if value is not None:
1778
delta_list.append((old_key, key, value))
1780
delta_list.append((old_key, None, None))
1781
result.parent_id_basename_to_file_id.apply_delta(delta_list)
1782
parents.discard(('', None))
1783
for parent_path, parent in parents:
1785
if result[parent].kind != 'directory':
1786
raise errors.InconsistentDelta(result.id2path(parent), parent,
1787
'Not a directory, but given children')
1788
except errors.NoSuchId:
1789
raise errors.InconsistentDelta("<unknown>", parent,
1790
"Parent is not present in resulting inventory.")
1791
if result.path2id(parent_path) != parent:
1792
raise errors.InconsistentDelta(parent_path, parent,
1793
"Parent has wrong path %r." % result.path2id(parent_path))
    @classmethod
    def deserialise(klass, chk_store, bytes, expected_revision_id):
        """Deserialise a CHKInventory.

        :param chk_store: A CHK capable VersionedFiles instance.
        :param bytes: The serialised bytes.
        :param expected_revision_id: The revision ID we think this inventory is
            for.
        :return: A CHKInventory
        """
        lines = bytes.split('\n')
        if lines[-1] != '':
            raise AssertionError('bytes to deserialize must end with an eol')
        lines.pop()
        if lines[0] != 'chkinventory:':
            raise ValueError("not a serialised CHKInventory: %r" % bytes)
        info = {}
        allowed_keys = frozenset(['root_id', 'revision_id', 'search_key_name',
                                  'parent_id_basename_to_file_id',
                                  'id_to_entry'])
        for line in lines[1:]:
            key, value = line.split(': ', 1)
            if key not in allowed_keys:
                raise errors.BzrError('Unknown key in inventory: %r\n%r'
                                      % (key, bytes))
            if key in info:
                raise errors.BzrError('Duplicate key in inventory: %r\n%r'
                                      % (key, bytes))
            info[key] = value
        revision_id = intern(info['revision_id'])
        root_id = intern(info['root_id'])
        search_key_name = intern(info.get('search_key_name', 'plain'))
        parent_id_basename_to_file_id = intern(info.get(
            'parent_id_basename_to_file_id', None))
        if not parent_id_basename_to_file_id.startswith('sha1:'):
            raise ValueError('parent_id_basename_to_file_id should be a sha1'
                             ' key not %r' % (parent_id_basename_to_file_id,))
        id_to_entry = info['id_to_entry']
        if not id_to_entry.startswith('sha1:'):
            raise ValueError('id_to_entry should be a sha1'
                             ' key not %r' % (id_to_entry,))
        result = CHKInventory(search_key_name)
        result.revision_id = revision_id
        result.root_id = root_id
        search_key_func = chk_map.search_key_registry.get(
            result._search_key_name)
        if parent_id_basename_to_file_id is not None:
            result.parent_id_basename_to_file_id = chk_map.CHKMap(
                chk_store, StaticTuple(parent_id_basename_to_file_id,),
                search_key_func=search_key_func)
        else:
            result.parent_id_basename_to_file_id = None
        result.id_to_entry = chk_map.CHKMap(chk_store,
                                            StaticTuple(id_to_entry,),
                                            search_key_func=search_key_func)
        if (result.revision_id,) != expected_revision_id:
            raise ValueError("Mismatched revision id and expected: %r, %r" %
                (result.revision_id, expected_revision_id))
        return result
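
    # Illustrative sketch (not in the original source): the serialised form
    # that deserialise() parses is a small key/value header whose map entries
    # point at CHK root keys, e.g. (hypothetical sha1 values):
    #
    #   chkinventory:
    #   revision_id: rev-id-1
    #   root_id: TREE_ROOT
    #   parent_id_basename_to_file_id: sha1:abc...
    #   id_to_entry: sha1:def...
    #
    #   inv = CHKInventory.deserialise(chk_store, ''.join(other_inv.to_lines()),
    #                                  (other_inv.revision_id,))
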
    @classmethod
    def from_inventory(klass, chk_store, inventory, maximum_size=0, search_key_name='plain'):
        """Create a CHKInventory from an existing inventory.

        The content of inventory is copied into the chk_store, and a
        CHKInventory referencing that is returned.

        :param chk_store: A CHK capable VersionedFiles instance.
        :param inventory: The inventory to copy.
        :param maximum_size: The CHKMap node size limit.
        :param search_key_name: The identifier for the search key function
        """
        result = klass(search_key_name)
        result.revision_id = inventory.revision_id
        result.root_id = inventory.root.file_id

        entry_to_bytes = result._entry_to_bytes
        parent_id_basename_key = result._parent_id_basename_key
        id_to_entry_dict = {}
        parent_id_basename_dict = {}
        for path, entry in inventory.iter_entries():
            key = StaticTuple(entry.file_id,).intern()
            id_to_entry_dict[key] = entry_to_bytes(entry)
            p_id_key = parent_id_basename_key(entry)
            parent_id_basename_dict[p_id_key] = entry.file_id

        result._populate_from_dicts(chk_store, id_to_entry_dict,
            parent_id_basename_dict, maximum_size=maximum_size)
        return result
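
    # Illustrative sketch (not in the original source): converting a plain
    # in-memory Inventory into CHK form, assuming `chk_store` is a CHK capable
    # VersionedFiles instance and `inv` is a populated Inventory:
    #
    #   chk_inv = CHKInventory.from_inventory(chk_store, inv,
    #                                         maximum_size=4096,
    #                                         search_key_name='hash-255-way')
    #   chk_inv.id_to_entry.key()   # root key of the id_to_entry CHKMap
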
    def _populate_from_dicts(self, chk_store, id_to_entry_dict,
                             parent_id_basename_dict, maximum_size):
        search_key_func = chk_map.search_key_registry.get(self._search_key_name)
        root_key = chk_map.CHKMap.from_dict(chk_store, id_to_entry_dict,
                   maximum_size=maximum_size, key_width=1,
                   search_key_func=search_key_func)
        self.id_to_entry = chk_map.CHKMap(chk_store, root_key,
                                          search_key_func)
        root_key = chk_map.CHKMap.from_dict(chk_store,
                   parent_id_basename_dict,
                   maximum_size=maximum_size, key_width=2,
                   search_key_func=search_key_func)
        self.parent_id_basename_to_file_id = chk_map.CHKMap(chk_store,
            root_key, search_key_func)

    def _parent_id_basename_key(self, entry):
        """Create a key for an entry in a parent_id_basename_to_file_id index."""
        if entry.parent_id is not None:
            parent_id = entry.parent_id
        else:
            parent_id = ''
        return StaticTuple(parent_id, entry.name.encode('utf8')).intern()
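
    # Illustrative sketch (not in the original source): for an entry named
    # 'hello.c' whose parent directory has file id 'src-id', the index key is
    # roughly StaticTuple('src-id', 'hello.c'); the tree root (no parent_id)
    # gets the ('', '') key that _preload_cache() treats as the root marker.
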
    def __getitem__(self, file_id):
        """map a single file_id -> InventoryEntry."""
        if file_id is None:
            raise errors.NoSuchId(self, file_id)
        result = self._fileid_to_entry_cache.get(file_id, None)
        if result is not None:
            return result
        try:
            return self._bytes_to_entry(
                self.id_to_entry.iteritems([StaticTuple(file_id,)]).next()[1])
        except StopIteration:
            # really we're passing an inventory, not a tree...
            raise errors.NoSuchId(self, file_id)

    def _getitems(self, file_ids):
        """Similar to __getitem__, but lets you query for multiple.

        The returned order is undefined. Currently, if an item doesn't
        exist, it isn't included in the output.
        """
        result = []
        remaining = []
        for file_id in file_ids:
            entry = self._fileid_to_entry_cache.get(file_id, None)
            if entry is None:
                remaining.append(file_id)
            else:
                result.append(entry)
        file_keys = [StaticTuple(f,).intern() for f in remaining]
        for file_key, value in self.id_to_entry.iteritems(file_keys):
            entry = self._bytes_to_entry(value)
            result.append(entry)
            self._fileid_to_entry_cache[entry.file_id] = entry
        return result
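
    # Illustrative sketch (not in the original source): batch lookup goes
    # through the entry cache first and only hits the id_to_entry CHKMap for
    # the misses, so repeated calls get cheaper:
    #
    #   entries = chk_inv._getitems(['file-id-1', 'file-id-2'])
    #   # unknown ids are silently skipped rather than raising NoSuchId
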
    def has_id(self, file_id):
        # Perhaps have an explicit 'contains' method on CHKMap ?
        if self._fileid_to_entry_cache.get(file_id, None) is not None:
            return True
        return len(list(
            self.id_to_entry.iteritems([StaticTuple(file_id,)]))) == 1

    def is_root(self, file_id):
        return file_id == self.root_id

    def _iter_file_id_parents(self, file_id):
        """Yield the parents of file_id up to the root."""
        while file_id is not None:
            try:
                ie = self[file_id]
            except KeyError:
                raise errors.NoSuchId(tree=self, file_id=file_id)
            yield ie
            file_id = ie.parent_id

    def __iter__(self):
        """Iterate over all file-ids."""
        for key, _ in self.id_to_entry.iteritems():
            yield key[-1]

    def iter_just_entries(self):
        """Iterate over all entries.

        Unlike iter_entries(), just the entries are returned (not (path, ie))
        and the order of entries is undefined.

        XXX: We may not want to merge this into bzr.dev.
        """
        for key, entry in self.id_to_entry.iteritems():
            file_id = key[0]
            ie = self._fileid_to_entry_cache.get(file_id, None)
            if ie is None:
                ie = self._bytes_to_entry(entry)
                self._fileid_to_entry_cache[file_id] = ie
            yield ie

    def _preload_cache(self):
        """Make sure all file-ids are in _fileid_to_entry_cache"""
        if self._fully_cached:
            return # No need to do it again
        # The optimal sort order is to use iteritems() directly
        cache = self._fileid_to_entry_cache
        for key, entry in self.id_to_entry.iteritems():
            file_id = key[0]
            if file_id not in cache:
                ie = self._bytes_to_entry(entry)
                cache[file_id] = ie
            else:
                ie = cache[file_id]
        last_parent_id = last_parent_ie = None
        pid_items = self.parent_id_basename_to_file_id.iteritems()
        for key, child_file_id in pid_items:
            if key == ('', ''): # This is the root
                if child_file_id != self.root_id:
                    raise ValueError('Data inconsistency detected.'
                        ' We expected data with key ("","") to match'
                        ' the root id, but %s != %s'
                        % (child_file_id, self.root_id))
                continue
            parent_id, basename = key
            ie = cache[child_file_id]
            if parent_id == last_parent_id:
                parent_ie = last_parent_ie
            else:
                parent_ie = cache[parent_id]
            if parent_ie.kind != 'directory':
                raise ValueError('Data inconsistency detected.'
                    ' An entry in the parent_id_basename_to_file_id map'
                    ' has parent_id {%s} but the kind of that object'
                    ' is %r not "directory"' % (parent_id, parent_ie.kind))
            if parent_ie._children is None:
                parent_ie._children = {}
            basename = basename.decode('utf-8')
            if basename in parent_ie._children:
                existing_ie = parent_ie._children[basename]
                if existing_ie != ie:
                    raise ValueError('Data inconsistency detected.'
                        ' Two entries with basename %r were found'
                        ' in the parent entry {%s}'
                        % (basename, parent_id))
            if basename != ie.name:
                raise ValueError('Data inconsistency detected.'
                    ' In the parent_id_basename_to_file_id map, file_id'
                    ' {%s} is listed as having basename %r, but in the'
                    ' id_to_entry map it is %r'
                    % (child_file_id, basename, ie.name))
            parent_ie._children[basename] = ie
        self._fully_cached = True
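
    # Illustrative note (not in the original source): after _preload_cache()
    # every entry is decoded into _fileid_to_entry_cache and directory entries
    # have their _children dicts wired up, so later traversals no longer need
    # to touch the CHK maps:
    #
    #   chk_inv._preload_cache()
    #   chk_inv[chk_inv.root_id].children   # served from the in-memory cache
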
    def iter_changes(self, basis):
        """Generate a Tree.iter_changes change list between this and basis.

        :param basis: Another CHKInventory.
        :return: An iterator over the changes between self and basis, as per
            tree.iter_changes().
        """
        # We want: (file_id, (path_in_source, path_in_target),
        #          changed_content, versioned, parent, name, kind,
        #          executable)
        for key, basis_value, self_value in \
            self.id_to_entry.iter_changes(basis.id_to_entry):
            file_id = key[0]
            if basis_value is not None:
                basis_entry = basis._bytes_to_entry(basis_value)
                path_in_source = basis.id2path(file_id)
                basis_parent = basis_entry.parent_id
                basis_name = basis_entry.name
                basis_executable = basis_entry.executable
            else:
                path_in_source = None
                basis_parent = None
                basis_name = None
                basis_executable = None
            if self_value is not None:
                self_entry = self._bytes_to_entry(self_value)
                path_in_target = self.id2path(file_id)
                self_parent = self_entry.parent_id
                self_name = self_entry.name
                self_executable = self_entry.executable
            else:
                path_in_target = None
                self_parent = None
                self_name = None
                self_executable = None
            if basis_value is None:
                kind = (None, self_entry.kind)
                versioned = (False, True)
            elif self_value is None:
                kind = (basis_entry.kind, None)
                versioned = (True, False)
            else:
                kind = (basis_entry.kind, self_entry.kind)
                versioned = (True, True)
            changed_content = False
            if kind[0] != kind[1]:
                changed_content = True
            elif kind[0] == 'file':
                if (self_entry.text_size != basis_entry.text_size or
                    self_entry.text_sha1 != basis_entry.text_sha1):
                    changed_content = True
            elif kind[0] == 'symlink':
                if self_entry.symlink_target != basis_entry.symlink_target:
                    changed_content = True
            elif kind[0] == 'tree-reference':
                if (self_entry.reference_revision !=
                    basis_entry.reference_revision):
                    changed_content = True
            parent = (basis_parent, self_parent)
            name = (basis_name, self_name)
            executable = (basis_executable, self_executable)
            if (not changed_content
                and parent[0] == parent[1]
                and name[0] == name[1]
                and executable[0] == executable[1]):
                # Could happen when only the revision changed for a directory
                # for instance.
                continue
            yield (file_id, (path_in_source, path_in_target), changed_content,
                versioned, parent, name, kind, executable)
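
    # Illustrative sketch (not in the original source): each yielded change has
    # the Tree.iter_changes() shape; a rename of src/a.c to src/b.c with
    # modified text might look roughly like (hypothetical ids and paths):
    #
    #   ('a-id', ('src/a.c', 'src/b.c'), True, (True, True),
    #    ('src-id', 'src-id'), ('a.c', 'b.c'), ('file', 'file'), (False, False))
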
    def __len__(self):
        """Return the number of entries in the inventory."""
        return len(self.id_to_entry)

    def _make_delta(self, old):
        """Make an inventory delta from two inventories."""
        if type(old) != CHKInventory:
            return CommonInventory._make_delta(self, old)
        delta = []
        for key, old_value, self_value in \
            self.id_to_entry.iter_changes(old.id_to_entry):
            file_id = key[0]
            if old_value is not None:
                old_path = old.id2path(file_id)
            else:
                old_path = None
            if self_value is not None:
                entry = self._bytes_to_entry(self_value)
                self._fileid_to_entry_cache[file_id] = entry
                new_path = self.id2path(file_id)
            else:
                entry = None
                new_path = None
            delta.append((old_path, new_path, file_id, entry))
        return delta

    def path2id(self, relpath):
        """See CommonInventory.path2id()."""
        # TODO: perhaps support negative hits?
        result = self._path_to_fileid_cache.get(relpath, None)
        if result is not None:
            return result
        if isinstance(relpath, basestring):
            names = osutils.splitpath(relpath)
        else:
            names = relpath
        current_id = self.root_id
        if current_id is None:
            return None
        parent_id_index = self.parent_id_basename_to_file_id
        cur_path = None
        for basename in names:
            if cur_path is None:
                cur_path = basename
            else:
                cur_path = cur_path + '/' + basename
            basename_utf8 = basename.encode('utf8')
            file_id = self._path_to_fileid_cache.get(cur_path, None)
            if file_id is None:
                key_filter = [StaticTuple(current_id, basename_utf8)]
                items = parent_id_index.iteritems(key_filter)
                for (parent_id, name_utf8), file_id in items:
                    if parent_id != current_id or name_utf8 != basename_utf8:
                        raise errors.BzrError("corrupt inventory lookup! "
                            "%r %r %r %r" % (parent_id, current_id, name_utf8,
                            basename_utf8))
                if file_id is None:
                    return None
                else:
                    self._path_to_fileid_cache[cur_path] = file_id
            current_id = file_id
        return current_id
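
    # Illustrative sketch (not in the original source): path lookup walks the
    # parent_id_basename_to_file_id index one path element at a time, caching
    # each prefix in _path_to_fileid_cache:
    #
    #   chk_inv.path2id('src/hello.c')   # -> e.g. 'hello-id', or None if absent
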
    def to_lines(self):
        """Serialise the inventory to lines."""
        lines = ["chkinventory:\n"]
        if self._search_key_name != 'plain':
            # custom ordering grouping things that don't change together
            lines.append('search_key_name: %s\n' % (self._search_key_name,))
            lines.append("root_id: %s\n" % self.root_id)
            lines.append('parent_id_basename_to_file_id: %s\n' %
                (self.parent_id_basename_to_file_id.key()[0],))
            lines.append("revision_id: %s\n" % self.revision_id)
            lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
        else:
            lines.append("revision_id: %s\n" % self.revision_id)
            lines.append("root_id: %s\n" % self.root_id)
            if self.parent_id_basename_to_file_id is not None:
                lines.append('parent_id_basename_to_file_id: %s\n' %
                    (self.parent_id_basename_to_file_id.key()[0],))
            lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
        return lines

    @property
    def root(self):
        """Get the root entry."""
        return self[self.root_id]
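
    # Illustrative note (not in the original source): with the default 'plain'
    # search key the serialised header omits the search_key_name field; for any
    # other registered key function it is emitted first so readers know how the
    # CHKMap keys were mapped, e.g.:
    #
    #   inv_255 = CHKInventory.from_inventory(chk_store, inv,
    #                                         search_key_name='hash-255-way')
    #   inv_255.to_lines()[1]   # -> 'search_key_name: hash-255-way\n'
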

class CHKInventoryDirectory(InventoryDirectory):
    """A directory in an inventory."""

    __slots__ = ['_children', '_chk_inventory']

    def __init__(self, file_id, name, parent_id, chk_inventory):
        # Don't call InventoryDirectory.__init__ - it isn't right for this
        # class.
        InventoryEntry.__init__(self, file_id, name, parent_id)
        self._children = None
        self._chk_inventory = chk_inventory

    @property
    def children(self):
        """Access the list of children of this directory.

        With a parent_id_basename_to_file_id index, this loads all the
        children at once; without one it would have to load the entire
        index, which is bad. A more sophisticated proxy object might be
        nice, to allow partial loading of children as well when specific
        names are accessed. (So path traversal can be written in the
        obvious way but not examine siblings.)
        """
        if self._children is not None:
            return self._children
        # No longer supported
        if self._chk_inventory.parent_id_basename_to_file_id is None:
            raise AssertionError("Inventories without"
                " parent_id_basename_to_file_id are no longer supported")
        result = {}
        # XXX: Todo - use proxy objects for the children rather than loading
        # all when the attribute is referenced.
        parent_id_index = self._chk_inventory.parent_id_basename_to_file_id
        child_keys = set()
        for (parent_id, name_utf8), file_id in parent_id_index.iteritems(
                key_filter=[StaticTuple(self.file_id,)]):
            child_keys.add(StaticTuple(file_id,))
        cached = set()
        for file_id_key in child_keys:
            entry = self._chk_inventory._fileid_to_entry_cache.get(
                file_id_key[0], None)
            if entry is not None:
                result[entry.name] = entry
                cached.add(file_id_key)
        child_keys.difference_update(cached)
        # populate; todo: do by name
        id_to_entry = self._chk_inventory.id_to_entry
        for file_id_key, bytes in id_to_entry.iteritems(child_keys):
            entry = self._chk_inventory._bytes_to_entry(bytes)
            result[entry.name] = entry
            self._chk_inventory._fileid_to_entry_cache[file_id_key[0]] = entry
        self._children = result
        return result
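
# Illustrative sketch (not in the original source): reading `children` on a
# CHKInventoryDirectory pulls just that directory's entries out of the
# parent_id_basename_to_file_id index and caches them for later access:
#
#   src_dir = chk_inv[chk_inv.path2id('src')]
#   sorted(src_dir.children)              # e.g. ['bye.c', 'hello.c']
#   src_dir.children['hello.c'].file_id
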

entry_factory = {
    'directory': InventoryDirectory,
    'file': InventoryFile,
    'symlink': InventoryLink,
    'tree-reference': TreeReference
}


def make_entry(kind, name, parent_id, file_id=None):
    """Create an inventory entry.

    :param kind: the type of inventory entry to create.
    :param name: the basename of the entry.
    :param parent_id: the parent_id of the entry.
    :param file_id: the file_id to use. If None, one will be created.
    """
    if file_id is None:
        file_id = generate_ids.gen_file_id(name)
    name = ensure_normalized_name(name)
    try:
        factory = entry_factory[kind]
    except KeyError:
        raise errors.BadFileKindError(name, kind)
    return factory(file_id, name, parent_id)
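
# Illustrative sketch (not in the original source): make_entry() is the usual
# way to construct entries without choosing a concrete class by hand:
#
#   ie = make_entry('file', 'hello.c', 'src-id')            # file_id generated
#   link = make_entry('symlink', 'latest', 'src-id', 'link-id')
#   make_entry('pipe', 'oops', 'src-id')    # raises errors.BadFileKindError
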

def ensure_normalized_name(name):
    """Normalize name.

    :raises InvalidNormalization: When name is not normalized, and cannot be
        accessed on this platform by the normalized path.
    :return: The NFC normalised version of name.
    """
    #------- This has been copied to bzrlib.dirstate.DirState.add, please
    # keep them synchronised.
    # we don't import normalized_filename directly because we want to be
    # able to change the implementation at runtime for tests.
    norm_name, can_access = osutils.normalized_filename(name)
    if norm_name != name:
        if can_access:
            return norm_name
        else:
            # TODO: jam 20060701 This would probably be more useful
            #       if the error was raised with the full path
            raise errors.InvalidNormalization(name)
    return name


_NAME_RE = None


def is_valid_name(name):
    global _NAME_RE
    if _NAME_RE is None:
        _NAME_RE = re.compile(r'^[^/\\]+$')

    return bool(_NAME_RE.match(name))
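
# Illustrative sketch (not in the original source): on a platform that can
# access NFD filenames (e.g. Mac OS X), ensure_normalized_name() returns the
# NFC form; on platforms that cannot, it raises InvalidNormalization instead:
#
#   ensure_normalized_name(u'a\u0300.txt')  # -> u'\xe0.txt' where accessible
#   is_valid_name('hello.c')     # True
#   is_valid_name('a/b')         # False - path separators are not allowed
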

def _check_delta_unique_ids(delta):
    """Decorate a delta and check that the file ids in it are unique.

    :return: A generator over delta.
    """
    ids = set()
    for item in delta:
        length = len(ids) + 1
        ids.add(item[2])
        if len(ids) != length:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "repeated file_id")
        yield item


def _check_delta_unique_new_paths(delta):
    """Decorate a delta and check that the new paths in it are unique.

    :return: A generator over delta.
    """
    paths = set()
    for item in delta:
        length = len(paths) + 1
        path = item[1]
        if path is not None:
            paths.add(path)
            if len(paths) != length:
                raise errors.InconsistentDelta(path, item[2], "repeated path")
        yield item


def _check_delta_unique_old_paths(delta):
    """Decorate a delta and check that the old paths in it are unique.

    :return: A generator over delta.
    """
    paths = set()
    for item in delta:
        length = len(paths) + 1
        path = item[0]
        if path is not None:
            paths.add(path)
            if len(paths) != length:
                raise errors.InconsistentDelta(path, item[2], "repeated path")
        yield item


def _check_delta_ids_are_valid(delta):
    """Decorate a delta and check that the ids in it are valid.

    :return: A generator over delta.
    """
    for item in delta:
        entry = item[3]
        if item[2] is None:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "entry with file_id None %r" % entry)
        if type(item[2]) != str:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "entry with non bytes file_id %r" % entry)
        yield item


def _check_delta_ids_match_entry(delta):
    """Decorate a delta and check that the ids in it match the entry.file_id.

    :return: A generator over delta.
    """
    for item in delta:
        entry = item[3]
        if entry is not None:
            if entry.file_id != item[2]:
                raise errors.InconsistentDelta(item[0] or item[1], item[2],
                    "mismatched id with %r" % entry)
        yield item


def _check_delta_new_path_entry_both_or_None(delta):
    """Decorate a delta and check that the new_path and entry are paired.

    :return: A generator over delta.
    """
    for item in delta:
        new_path = item[1]
        entry = item[3]
        if new_path is None and entry is not None:
            raise errors.InconsistentDelta(item[0], item[1],
                "Entry with no new_path")
        if new_path is not None and entry is None:
            raise errors.InconsistentDelta(new_path, item[1],
                "new_path with no entry")
        yield item