@@ -421 +430 @@
         inventory_keys = source_vf.keys()
         missing_inventories = set(self.revision_keys).difference(inventory_keys)
         if missing_inventories:
-            missing_inventories = sorted(missing_inventories)
-            raise ValueError('We are missing inventories for revisions: %s'
-                % (missing_inventories,))
+            # Go back to the original repo, to see if these are really missing
+            # https://bugs.launchpad.net/bzr/+bug/437003
+            # If we are packing a subset of the repo, it is fine to just have
+            # the data in another Pack file, which is not included in this pack
+            inv_index = self._pack_collection.repo.inventories._index
+            pmap = inv_index.get_parent_map(missing_inventories)
+            really_missing = missing_inventories.difference(pmap)
+            if really_missing:
+                missing_inventories = sorted(really_missing)
+                raise ValueError('We are missing inventories for revisions: %s'
+                    % (missing_inventories,))
         self._copy_stream(source_vf, target_vf, inventory_keys,
                           'inventories', self._get_filtered_inv_stream, 2)

+    def _get_chk_vfs_for_copy(self):
+        return self._build_vfs('chk', False, False)
+
     def _copy_chk_texts(self):
-        source_vf, target_vf = self._build_vfs('chk', False, False)
+        source_vf, target_vf = self._get_chk_vfs_for_copy()
         # TODO: This is technically spurious... if it is a performance issue,
         total_keys = source_vf.keys()
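A rough standalone sketch of the check this hunk introduces (the names `find_really_missing` and `full_repo_lookup` are illustrative, not bzrlib APIs): rather than raising as soon as a revision lacks an inventory in the pack being built, the packer first asks the repository-wide index whether the inventory lives in some other pack, and only the remainder is treated as really missing.

def find_really_missing(revision_keys, packed_inventory_keys, full_repo_lookup):
    """Return inventory keys absent from both the new pack and the repository.

    full_repo_lookup(keys) should return the subset of keys known anywhere in
    the repository (a hypothetical stand-in for get_parent_map on the
    repository-wide inventory index).
    """
    missing = set(revision_keys).difference(packed_inventory_keys)
    if not missing:
        return set()
    # Present in some other pack file is fine; only keys nobody has are fatal.
    return missing.difference(full_repo_lookup(missing))

# Example: rev-3 lives in another pack, rev-4 is genuinely gone.
assert find_really_missing(
    ['rev-1', 'rev-2', 'rev-3', 'rev-4'],
    {'rev-1', 'rev-2'},
    lambda keys: {'rev-3'},
) == {'rev-4'}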
@@ -580 +601 @@
         return new_pack.data_inserted() and self._data_changed


+class GCCHKCanonicalizingPacker(GCCHKPacker):
+    """A packer that ensures inventories have canonical-form CHK maps.
+
+    Ideally this would be part of reconcile, but it's very slow and rarely
+    needed.  (It repairs repositories affected by
+    https://bugs.launchpad.net/bzr/+bug/522637).
+    """
+
+    def __init__(self, *args, **kwargs):
+        super(GCCHKCanonicalizingPacker, self).__init__(*args, **kwargs)
+        self._data_changed = False
+
+    def _exhaust_stream(self, source_vf, keys, message, vf_to_stream, pb_offset):
+        """Create and exhaust a stream, but don't insert it.
+
+        This is useful to get the side-effects of generating a stream.
+        """
+        self.pb.update('scanning %s' % (message,), pb_offset)
+        child_pb = ui.ui_factory.nested_progress_bar()
+        list(vf_to_stream(source_vf, keys, message, child_pb))
+
+    def _copy_inventory_texts(self):
+        source_vf, target_vf = self._build_vfs('inventory', True, True)
+        source_chk_vf, target_chk_vf = self._get_chk_vfs_for_copy()
+        inventory_keys = source_vf.keys()
+        # First, copy the existing CHKs on the assumption that most of them
+        # will be correct.  This will save us from having to reinsert (and
+        # recompress) these records later at the cost of perhaps preserving a
+        # (Iterate but don't insert _get_filtered_inv_stream to populate the
+        # variables needed by GCCHKPacker._copy_chk_texts.)
+        self._exhaust_stream(source_vf, inventory_keys, 'inventories',
+                self._get_filtered_inv_stream, 2)
+        GCCHKPacker._copy_chk_texts(self)
+        # Now copy and fix the inventories, and any regenerated CHKs.
+        def chk_canonicalizing_inv_stream(source_vf, keys, message, pb=None):
+            return self._get_filtered_canonicalizing_inv_stream(
+                source_vf, keys, message, pb, source_chk_vf, target_chk_vf)
+        self._copy_stream(source_vf, target_vf, inventory_keys,
+                          'inventories', chk_canonicalizing_inv_stream, 4)
+
+    def _copy_chk_texts(self):
+        # No-op; in this class this happens during _copy_inventory_texts.
+        pass
+
+    def _get_filtered_canonicalizing_inv_stream(self, source_vf, keys, message,
+            pb=None, source_chk_vf=None, target_chk_vf=None):
+        """Filter the texts of inventories, regenerating CHKs to make sure they
+        are canonical.
+        """
+        total_keys = len(keys)
+        target_chk_vf = versionedfile.NoDupeAddLinesDecorator(target_chk_vf)
+        def _filtered_inv_stream():
+            stream = source_vf.get_record_stream(keys, 'groupcompress', True)
+            search_key_name = None
+            for idx, record in enumerate(stream):
+                # Inventories should always be with revisions; assume success.
+                bytes = record.get_bytes_as('fulltext')
+                chk_inv = inventory.CHKInventory.deserialise(
+                    source_chk_vf, bytes, record.key)
+                if pb is not None:
+                    pb.update('inv', idx, total_keys)
+                chk_inv.id_to_entry._ensure_root()
+                if search_key_name is None:
+                    # Find the name corresponding to the search_key_func
+                    search_key_reg = chk_map.search_key_registry
+                    for search_key_name, func in viewitems(search_key_reg):
+                        if func == chk_inv.id_to_entry._search_key_func:
+                            break
+                canonical_inv = inventory.CHKInventory.from_inventory(
+                    target_chk_vf, chk_inv,
+                    maximum_size=chk_inv.id_to_entry._root_node._maximum_size,
+                    search_key_name=search_key_name)
+                if chk_inv.id_to_entry.key() != canonical_inv.id_to_entry.key():
+                    trace.mutter(
+                        'Non-canonical CHK map for id_to_entry of inv: %s '
+                        '(root is %s, should be %s)' % (chk_inv.revision_id,
+                        chk_inv.id_to_entry.key()[0],
+                        canonical_inv.id_to_entry.key()[0]))
+                    self._data_changed = True
+                p_id_map = chk_inv.parent_id_basename_to_file_id
+                p_id_map._ensure_root()
+                canon_p_id_map = canonical_inv.parent_id_basename_to_file_id
+                if p_id_map.key() != canon_p_id_map.key():
+                    trace.mutter(
+                        'Non-canonical CHK map for parent_id_to_basename of '
+                        'inv: %s (root is %s, should be %s)'
+                        % (chk_inv.revision_id, p_id_map.key()[0],
+                           canon_p_id_map.key()[0]))
+                    self._data_changed = True
+                yield versionedfile.ChunkedContentFactory(record.key,
+                        record.parents, record.sha1,
+                        canonical_inv.to_lines())
+            # We have finished processing all of the inventory records, we
+            # don't need these sets anymore
+        return _filtered_inv_stream()
+
+    def _use_pack(self, new_pack):
+        """Override _use_pack to check for reconcile having changed content."""
+        return new_pack.data_inserted() and self._data_changed


 class GCRepositoryPackCollection(RepositoryPackCollection):

     pack_factory = GCPack
     resumed_pack_factory = ResumedGCPack
+    normal_packer_class = GCCHKPacker
+    optimising_packer_class = GCCHKPacker

     def _check_new_inventories(self):
         """Detect missing inventories or chk root entries for the new revisions
@@ -671 +802 @@
                 % (sorted(missing_text_keys),))

-    def _execute_pack_operations(self, pack_operations,
-                                 _packer_class=GCCHKPacker,
-        """Execute a series of pack operations.
-
-        :param pack_operations: A list of [revision_count, packs_to_combine].
-        :param _packer_class: The class of packer to use (default: Packer).
-        # XXX: Copied across from RepositoryPackCollection simply because we
-        #      want to override the _packer_class ... :(
-        for revision_count, packs in pack_operations:
-            # we may have no-ops from the setup logic
-            packer = GCCHKPacker(self, packs, '.autopack',
-                                 reload_func=reload_func)
-                result = packer.pack()
-            except errors.RetryWithNewPacks:
-                # An exception is propagating out of this context, make sure
-                # this packer has cleaned up. Packer() doesn't set its new_pack
-                # state into the RepositoryPackCollection object, so we only
-                # have access to it directly here.
-                if packer.new_pack is not None:
-                    packer.new_pack.abort()
-                self._remove_pack_from_memory(pack)
-            # record the newly available packs and stop advertising the old
-        for _, packs in pack_operations:
-            to_be_obsoleted.extend(packs)
-        result = self._save_pack_names(clear_obsolete_packs=True,
-                                       obsolete_packs=to_be_obsoleted)

-class CHKInventoryRepository(KnitPackRepository):
-    """subclass of KnitPackRepository that uses CHK based inventories."""
-
-    def __init__(self, _format, a_bzrdir, control_files, _commit_builder_class,
+class CHKInventoryRepository(PackRepository):
+    """subclass of PackRepository that uses CHK based inventories."""
+
+    def __init__(self, _format, a_controldir, control_files, _commit_builder_class,
         """Overridden to change pack collection class."""
-        KnitPackRepository.__init__(self, _format, a_bzrdir, control_files,
-            _commit_builder_class, _serializer)
-        # and now replace everything it did :)
+        super(CHKInventoryRepository, self).__init__(_format, a_controldir,
+            control_files, _commit_builder_class, _serializer)
         index_transport = self._transport.clone('indices')
         self._pack_collection = GCRepositoryPackCollection(self,
             self._transport, index_transport,
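The removed `_execute_pack_operations` override existed only to substitute `GCCHKPacker`; the replacement has the base collection consult `normal_packer_class` / `optimising_packer_class` class attributes instead, so subclasses no longer copy the whole method. A toy sketch of that pattern (the class names here are hypothetical, not the bzrlib ones):

class BasePackCollection:
    normal_packer_class = None  # subclasses say which packer to build

    def execute(self, packs):
        # Base class owns the control flow; subclass only supplies the class.
        packer = self.normal_packer_class(packs)
        return packer.pack()

class NoopPacker:
    def __init__(self, packs):
        self.packs = packs
    def pack(self):
        return len(self.packs)

class GCPackCollection(BasePackCollection):
    # One attribute replaces a copy-pasted override of execute().
    normal_packer_class = NoopPacker

assert GCPackCollection().execute(['p1', 'p2']) == 2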
@@ -896 +988 @@
             if record.storage_kind != 'absent':
                 texts[record.key] = record.get_bytes_as('fulltext')
             else:
-                raise errors.NoSuchRevision(self, record.key)
+                texts[record.key] = None
         for key in keys:
-            yield inventory.CHKInventory.deserialise(self.chk_bytes, texts[key], key)
+            bytes = texts[key]
+            if bytes is None:
+                yield (None, key[-1])
+            else:
+                yield (inventory.CHKInventory.deserialise(
+                    self.chk_bytes, bytes, key), key[-1])

-    def _iter_inventory_xmls(self, revision_ids, ordering):
+    def _get_inventory_xml(self, revision_id):
+        """Get serialized inventory as a string."""
         # Without a native 'xml' inventory, this method doesn't make sense.
         # However older working trees, and older bundles want it - so we supply
         # it allowing _get_inventory_xml to work. Bundles currently use the
         # serializer directly; this also isn't ideal, but there isn't an xml
-        # iteration interface offered at all for repositories. We could make
-        # _iter_inventory_xmls be part of the contract, even if kept private.
-        inv_to_str = self._serializer.write_inventory_to_string
-        for inv in self.iter_inventories(revision_ids, ordering=ordering):
-            yield inv_to_str(inv), inv.revision_id
+        # iteration interface offered at all for repositories.
+        return self._serializer.write_inventory_to_string(
+            self.get_inventory(revision_id))

     def _find_present_inventory_keys(self, revision_keys):
         parent_map = self.inventories.get_parent_map(revision_keys)
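The change above turns "missing inventory" from an exception into a `(None, revid)` pair so the caller decides how to react. A small sketch of that yielding pattern, with a plain dict standing in for the fetched texts map (names and data are illustrative):

def iter_inventories(keys, texts):
    """Yield (inventory_or_None, revision_id) for each requested key."""
    for key in keys:
        data = texts.get(key)
        if data is None:
            # Absent: report it instead of raising NoSuchRevision.
            yield (None, key[-1])
        else:
            yield (('parsed', data), key[-1])

texts = {('rev-1',): 'inv-bytes'}
result = list(iter_inventories([('rev-1',), ('rev-2',)], texts))
assert result == [(('parsed', 'inv-bytes'), 'rev-1'), (None, 'rev-2')]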
@@ -1013 +1122 @@
             return GroupCHKStreamSource(self, to_format)
         return super(CHKInventoryRepository, self)._get_source(to_format)

-class GroupCHKStreamSource(KnitPackStreamSource):
+    def _find_inconsistent_revision_parents(self, revisions_iterator=None):
+        """Find revisions with different parent lists in the revision object
+        and in the index graph.
+
+        :param revisions_iterator: None, or an iterator of (revid,
+            Revision-or-None). This iterator controls the revisions checked.
+        :returns: an iterator yielding tuples of (revision-id, parents-in-index,
+            parents-in-revision).
+        """
+        if not self.is_locked():
+            raise AssertionError()
+        vf = self.revisions
+        if revisions_iterator is None:
+            revisions_iterator = self.iter_revisions(self.all_revision_ids())
+        for revid, revision in revisions_iterator:
+            if revision is None:
+                continue
+            parent_map = vf.get_parent_map([(revid,)])
+            parents_according_to_index = tuple(parent[-1] for parent in
+                parent_map[(revid,)])
+            parents_according_to_revision = tuple(revision.parent_ids)
+            if parents_according_to_index != parents_according_to_revision:
+                yield (revid, parents_according_to_index,
+                       parents_according_to_revision)
+
+    def _check_for_inconsistent_revision_parents(self):
+        inconsistencies = list(self._find_inconsistent_revision_parents())
+        if inconsistencies:
+            raise errors.BzrCheckError(
+                "Revision index has inconsistent parents.")
+
+
+class GroupCHKStreamSource(StreamSource):
     """Used when both the source and target repo are GroupCHK repos."""

     def __init__(self, from_repository, to_format):
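A compact model of what `_find_inconsistent_revision_parents` checks: the parent list recorded in the revision index must agree with the parent ids stored on the revision object itself. Here both sides are plain dicts (hypothetical data, not the repository API):

def find_inconsistent_parents(index_parents, revision_parents):
    """Yield (revid, parents_in_index, parents_in_revision) for mismatches."""
    for revid, in_revision in revision_parents.items():
        in_index = index_parents.get(revid, ())
        if tuple(in_index) != tuple(in_revision):
            yield (revid, tuple(in_index), tuple(in_revision))

index_parents = {'r2': ('r1',), 'r3': ('r2',)}
revision_parents = {'r2': ('r1',), 'r3': ('r1', 'r2')}
assert list(find_inconsistent_parents(index_parents, revision_parents)) == [
    ('r3', ('r2',), ('r1', 'r2'))]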
@@ -1108 +1247 @@
             self._chk_p_id_roots = None
         yield 'chk_bytes', _get_parent_id_basename_to_file_id_pages()

+    def _get_text_stream(self):
+        # Note: We know we don't have to handle adding root keys, because both
+        # the source and target are the identical network name.
+        text_stream = self.from_repository.texts.get_record_stream(
+            self._text_keys, self._text_fetch_order, False)
+        return ('texts', text_stream)
+
     def get_stream(self, search):
+        def wrap_and_count(pb, rc, stream):
+            """Yield records from stream while showing progress."""
+            count = 0
+            for record in stream:
+                if count == rc.STEP:
+                    rc.increment(count)
+                    pb.update('Estimate', rc.current, rc.max)
+                    count = 0
+                count += 1
+                yield record
+
         revision_ids = search.get_keys()
+        pb = ui.ui_factory.nested_progress_bar()
+        rc = self._record_counter
+        self._record_counter.setup(len(revision_ids))
         for stream_info in self._fetch_revision_texts(revision_ids):
+            yield (stream_info[0],
+                   wrap_and_count(pb, rc, stream_info[1]))
         self._revision_keys = [(rev_id,) for rev_id in revision_ids]
-        self.from_repository.revisions.clear_cache()
-        self.from_repository.signatures.clear_cache()
-        yield self._get_inventory_stream(self._revision_keys)
-        self.from_repository.inventories.clear_cache()
         # TODO: The keys to exclude might be part of the search recipe
         # For now, exclude all parents that are at the edge of ancestry, for
         # which we have inventories
         from_repo = self.from_repository
         parent_keys = from_repo._find_parent_keys_of_revisions(
             self._revision_keys)
+        self.from_repository.revisions.clear_cache()
+        self.from_repository.signatures.clear_cache()
+        # Clear the repo's get_parent_map cache too.
+        self.from_repository._unstacked_provider.disable_cache()
+        self.from_repository._unstacked_provider.enable_cache()
+        s = self._get_inventory_stream(self._revision_keys)
+        yield (s[0], wrap_and_count(pb, rc, s[1]))
+        self.from_repository.inventories.clear_cache()
         for stream_info in self._get_filtered_chk_streams(parent_keys):
+            yield (stream_info[0], wrap_and_count(pb, rc, stream_info[1]))
         self.from_repository.chk_bytes.clear_cache()
-        yield self._get_text_stream()
+        s = self._get_text_stream()
+        yield (s[0], wrap_and_count(pb, rc, s[1]))
         self.from_repository.texts.clear_cache()
+        pb.update('Done', rc.max, rc.max)

     def get_stream_for_missing_keys(self, missing_keys):
         # missing keys can only occur when we are byte copying and not
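`wrap_and_count` is a generator that passes records through unchanged while updating a progress bar every `STEP` records. The same idea in isolation, with a plain callback in place of the UI progress bar (the names and step value are illustrative):

def wrap_and_count(stream, step, on_progress):
    """Yield items from stream, reporting progress every `step` items."""
    count = 0
    for item in stream:
        count += 1
        if count % step == 0:
            on_progress(count)
        yield item

updates = []
consumed = list(wrap_and_count(range(10), 4, updates.append))
assert consumed == list(range(10))
assert updates == [4, 8]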
@@ -1190 +1358 @@
-def _filter_text_keys(interesting_nodes_iterable, text_keys, bytes_to_info):
+def _filter_text_keys(interesting_nodes_iterable, text_keys, bytes_to_text_key):
     """Iterate the result of iter_interesting_nodes, yielding the records
     and adding to text_keys.
     """
+    text_keys_update = text_keys.update
     for record, items in interesting_nodes_iterable:
-        for name, bytes in items:
-            # Note: we don't care about name_utf8, because groupcompress repos
-            # are always rich-root, so there are no synthesised root records to
-            _, file_id, revision_id = bytes_to_info(bytes)
-            file_id = intern(file_id)
-            revision_id = intern(revision_id)
-            text_keys.add(StaticTuple(file_id, revision_id).intern())
+        text_keys_update([bytes_to_text_key(b) for n, b in items])
+        yield record


-class RepositoryFormatCHK1(RepositoryFormatPack):
-    """A hashed CHK+group compress pack repository."""
+class RepositoryFormat2a(RepositoryFormatPack):
+    """A CHK repository that uses the bencode revision serializer."""

     repository_class = CHKInventoryRepository
     supports_external_lookups = True
     supports_chks = True
-    # For right now, setting this to True gives us InterModel1And2 rather
-    # than InterDifferingSerializer
     _commit_builder_class = PackRootCommitBuilder
     rich_root_data = True
-    _serializer = chk_serializer.chk_serializer_255_bigpage
+    _serializer = chk_serializer.chk_bencode_serializer
     _commit_inv_deltas = True
     # What index classes to use
     index_builder_class = BTreeBuilder
     pack_compresses = True

     def _get_matching_bzrdir(self):
-        return bzrdir.format_registry.make_bzrdir('development6-rich-root')
-
-    def _ignore_setting_bzrdir(self, format):
-
-    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
-
-    def get_format_string(self):
-        """See RepositoryFormat.get_format_string()."""
-        return ('Bazaar development format - group compression and chk inventory'
-            ' (needs bzr.dev from 1.14)\n')
-
-    def get_format_description(self):
-        """See RepositoryFormat.get_format_description()."""
-        return ("Development repository format - rich roots, group compression"
-            " and chk inventories")
-
-
-class RepositoryFormatCHK2(RepositoryFormatCHK1):
-    """A CHK repository that uses the bencode revision serializer."""
-
-    _serializer = chk_serializer.chk_bencode_serializer
-
-    def _get_matching_bzrdir(self):
-        return bzrdir.format_registry.make_bzrdir('development7-rich-root')
-
-    def _ignore_setting_bzrdir(self, format):
-
-    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
-
-    def get_format_string(self):
-        """See RepositoryFormat.get_format_string()."""
-        return ('Bazaar development format - chk repository with bencode '
-            'revision serialization (needs bzr.dev from 1.16)\n')
-
-
-class RepositoryFormat2a(RepositoryFormatCHK2):
-    """A CHK repository that uses the bencode revision serializer.
-
-    This is the same as RepositoryFormatCHK2 but with a public name.
-
-    _serializer = chk_serializer.chk_bencode_serializer
-
-    def _get_matching_bzrdir(self):
-        return bzrdir.format_registry.make_bzrdir('2a')
-
-    def _ignore_setting_bzrdir(self, format):
-
-    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
-
-    def get_format_string(self):
+        return controldir.format_registry.make_controldir('2a')
+
+    def _ignore_setting_bzrdir(self, format):
+        pass
+
+    _matchingcontroldir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
+
+    @classmethod
+    def get_format_string(cls):
         return ('Bazaar repository format 2a (needs bzr 1.16 or later)\n')

     def get_format_description(self):
         """See RepositoryFormat.get_format_description()."""
         return ("Repository format 2a - rich roots, group compression"
                 " and chk inventories")


+class RepositoryFormat2aSubtree(RepositoryFormat2a):
+    """A 2a repository format that supports nested trees.
+    """
+
+    def _get_matching_bzrdir(self):
+        return controldir.format_registry.make_controldir('development-subtree')
+
+    def _ignore_setting_bzrdir(self, format):
+        pass
+
+    _matchingcontroldir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
+
+    @classmethod
+    def get_format_string(cls):
+        return ('Bazaar development format 8\n')
+
+    def get_format_description(self):
+        """See RepositoryFormat.get_format_description()."""
+        return ("Development repository format 8 - nested trees, "
+            "group compression and chk inventories")
+
+    supports_tree_reference = True
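The `_matchingcontroldir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)` lines above use the two-argument form of `property()`: the getter builds the matching control directory format on demand, and the "setter" deliberately swallows assignment so generic format-copying code cannot clobber it. A minimal standalone illustration of that idiom (the class and values are made up):

class FormatExample:
    def _get_matching(self):
        # Computed on access rather than stored.
        return 'matching-format-2a'

    def _ignore_setting(self, value):
        # Assignments are deliberately ignored.
        pass

    matching = property(_get_matching, _ignore_setting)

fmt = FormatExample()
assert fmt.matching == 'matching-format-2a'
fmt.matching = 'something-else'      # silently ignored, no AttributeError
assert fmt.matching == 'matching-format-2a'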