class KnitPackRepository(PackRepository, KnitRepository):

    def __init__(self, _format, a_controldir, control_files, _commit_builder_class,
                 _serializer):
        PackRepository.__init__(self, _format, a_controldir, control_files,
                                _commit_builder_class, _serializer)
        if self._format.supports_chks:
            raise AssertionError("chk not supported")
        index_transport = self._transport.clone('indices')
        self._pack_collection = KnitRepositoryPackCollection(self,
            self._transport,
            index_transport,
            self._transport.clone('upload'),
            self._transport.clone('packs'),
            _format.index_builder_class,
            _format.index_class,
            use_chk_index=False,
            )
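        # Expose the aggregated pack indices as KnitVersionedFiles stores.
        # Inventories and file texts are delta-compressed (chains capped at
        # 200 entries); revisions and signatures are always stored as full
        # texts (max_delta_chain=0).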
        self.inventories = KnitVersionedFiles(
            _KnitGraphIndex(self._pack_collection.inventory_index.combined_index,
                            add_callback=self._pack_collection.inventory_index.add_callback,
                            deltas=True, parents=True, is_locked=self.is_locked),
            data_access=self._pack_collection.inventory_index.data_access,
            max_delta_chain=200)
        self.revisions = KnitVersionedFiles(
            _KnitGraphIndex(self._pack_collection.revision_index.combined_index,
                            add_callback=self._pack_collection.revision_index.add_callback,
                            deltas=False, parents=True, is_locked=self.is_locked,
                            track_external_parent_refs=True),
            data_access=self._pack_collection.revision_index.data_access,
            max_delta_chain=0)
        self.signatures = KnitVersionedFiles(
            _KnitGraphIndex(self._pack_collection.signature_index.combined_index,
                            add_callback=self._pack_collection.signature_index.add_callback,
                            deltas=False, parents=False, is_locked=self.is_locked),
            data_access=self._pack_collection.signature_index.data_access,
            max_delta_chain=0)
        self.texts = KnitVersionedFiles(
            _KnitGraphIndex(self._pack_collection.text_index.combined_index,
                            add_callback=self._pack_collection.text_index.add_callback,
                            deltas=True, parents=True, is_locked=self.is_locked),
            data_access=self._pack_collection.text_index.data_access,
            max_delta_chain=200)
        self.chk_bytes = None


    # ...

    index_class = GraphIndex

    def _get_matching_bzrdir(self):
        return controldir.format_registry.make_controldir('pack-0.92')

    def _ignore_setting_bzrdir(self, format):
        pass
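
    # Assigning to _matchingcontroldir is deliberately ignored (the property's
    # setter is _ignore_setting_bzrdir); the matching control directory format
    # is always looked up from the format registry above.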
    _matchingcontroldir = property(
        _get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return b"Bazaar pack repository format 1 (needs bzr 0.92)\n"

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""

    # ...

    index_class = GraphIndex

    def _get_matching_bzrdir(self):
        return controldir.format_registry.make_controldir(
            'pack-0.92-subtree')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingcontroldir = property(
        _get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return b"Bazaar pack repository format 1 with subtree support (needs bzr 0.92)\n"

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""

    # ...

    index_class = GraphIndex

    def _get_matching_bzrdir(self):
        return controldir.format_registry.make_controldir(
            'rich-root-pack')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingcontroldir = property(
        _get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return (b"Bazaar pack repository format 1 with rich root"
                b" (needs bzr 1.0)\n")

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""

    # ...
        return xml5.serializer_v5

    def _get_matching_bzrdir(self):
        return controldir.format_registry.make_controldir('1.6')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingcontroldir = property(
        _get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return b"Bazaar RepositoryFormatKnitPack5 (bzr 1.6)\n"

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        # ...


class RepositoryFormatKnitPack5RichRoot(RepositoryFormatPack):
    """A repository with rich roots and stacking.

    New in release 1.6.1.

    Supports stacking on other repositories, allowing data to be accessed
    without being stored locally.
    """

    repository_class = KnitPackRepository
    _commit_builder_class = PackCommitBuilder
    rich_root_data = True
    supports_tree_reference = False  # no subtrees
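    # Stacking support: with external lookups enabled, data can be fetched
    # from a fallback repository instead of being stored locally (see the
    # class docstring).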
    supports_external_lookups = True
    # What index classes to use
    index_builder_class = InMemoryGraphIndex
    # ...
        return xml6.serializer_v6

    def _get_matching_bzrdir(self):
        return controldir.format_registry.make_controldir(
            '1.6.1-rich-root')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingcontroldir = property(
        _get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return b"Bazaar RepositoryFormatKnitPack5RichRoot (bzr 1.6.1)\n"

    def get_format_description(self):
        return "Packs 5 rich-root (adds stacking support, requires bzr 1.6.1)"

    # ...

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingcontroldir = property(
        _get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return b"Bazaar RepositoryFormatKnitPack5RichRoot (bzr 1.6)\n"

    def get_format_description(self):
        return ("Packs 5 rich-root (adds stacking support, requires bzr 1.6)"
                " (deprecated)")

    # ...
        return xml5.serializer_v5

    def _get_matching_bzrdir(self):
        return controldir.format_registry.make_controldir('1.9')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingcontroldir = property(
        _get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return b"Bazaar RepositoryFormatKnitPack6 (bzr 1.9)\n"

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""

    # ...
        return xml6.serializer_v6

    def _get_matching_bzrdir(self):
        return controldir.format_registry.make_controldir(
            '1.9-rich-root')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingcontroldir = property(
        _get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return b"Bazaar RepositoryFormatKnitPack6RichRoot (bzr 1.9)\n"

    def get_format_description(self):
        return "Packs 6 rich-root (uses btree indexes, requires bzr 1.9)"

    # ...
        return xml7.serializer_v7

    def _get_matching_bzrdir(self):
        return controldir.format_registry.make_controldir(
            'development5-subtree')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingcontroldir = property(
        _get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return (b"Bazaar development format 2 with subtree support "
                b"(needs bzr.dev from before 1.8)\n")

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return ("Development repository format, currently the same as "
                "1.6.1-subtree with B+Tree indices.\n")


class KnitPackStreamSource(StreamSource):
    # ...
        content_text_keys = set()
        knit = KnitVersionedFiles(None, None)
        factory = KnitPlainFactory()
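
        # Walk the raw knit records of inventory texts and collect the text
        # keys they reference, without deserialising each inventory.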
        def find_text_keys_from_content(record):
            if record.storage_kind not in ('knit-delta-gz', 'knit-ft-gz'):
                raise ValueError("Unknown content storage kind for"
                                 " inventory text: %s" % (record.storage_kind,))
            # It's a knit record, it has a _raw_record field (even if it was
            # reconstituted from a network stream).
            raw_data = record._raw_record

    # ...
        return all_index.iter_entries(key_filter)

    def _copy_nodes(self, nodes, index_map, writer, write_index,
                    output_lines=False):
        """Copy knit nodes between packs with no graph references.

        :param output_lines: Output full texts of copied items.
        """
        with ui.ui_factory.nested_progress_bar() as pb:
            return self._do_copy_nodes(nodes, index_map, writer,
                                       write_index, pb, output_lines=output_lines)

    def _do_copy_nodes(self, nodes, index_map, writer, write_index, pb,
                       output_lines=False):
        # for record verification
        knit = KnitVersionedFiles(None, None)
        # plan a readv on each source pack:
        # ...
                request_groups[index].append((key, value))
        record_index = 0
        pb.update("Copied record", record_index, len(nodes))
        for index, items in request_groups.items():
            pack_readv_requests = []
            for key, value in items:
                # ---- KnitGraphIndex.get_position
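                # The index value is b"<no-eol flag><offset> <length>"; keep
                # the one-byte flag and turn offset/length into a readv
                # request against the source pack.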
                bits = value[1:].split(b' ')
                offset, length = int(bits[0]), int(bits[1])
                pack_readv_requests.append((offset, length, (key, value[0:1])))
            # linear scan up the pack
            pack_readv_requests.sort()
            # ...
                df, _ = knit._parse_record_header(key, raw_data)
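                # Append the raw record to the pack being written and index it
                # at its new offset, preserving the no-eol flag byte.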
                pos, size = writer.add_bytes_record([raw_data], len(raw_data), names)
                write_index.add_node(key, eol_flag + b"%d %d" % (pos, size))
                pb.update("Copied record", record_index)
                record_index += 1

    def _copy_nodes_graph(self, index_map, writer, write_index,
                          readv_group_iter, total_items, output_lines=False):
        """Copy knit nodes between packs.

        :param output_lines: Return lines present in the copied data as
            an iterator of line,version_id.
        """
        with ui.ui_factory.nested_progress_bar() as pb:
            for result in self._do_copy_nodes_graph(index_map, writer,
                    write_index, output_lines, pb, readv_group_iter, total_items):
                yield result

    def _do_copy_nodes_graph(self, index_map, writer, write_index,
                             output_lines, pb, readv_group_iter, total_items):
        # for record verification
        knit = KnitVersionedFiles(None, None)
        # for line extraction when requested (inventories only)

    # ...
        fileid_revisions = repo._find_file_ids_from_xml_inventory_lines(
            inv_lines, self.revision_keys)
        text_filter = []
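        # Flatten the {file_id: revision_ids} map into (file_id, revision_id)
        # text keys; these drive the later text copy.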
        for fileid, file_revids in fileid_revisions.items():
            text_filter.extend([(fileid, file_revid)
                                for file_revid in file_revids])
        self._text_filter = text_filter

    def _copy_inventory_texts(self):
        # select inventory keys
        inv_keys = self._revision_keys  # currently the same keyspace, and note that
        # querying for keys here could introduce a bug where an inventory item
        # is missed, so do not change it to query separately without cross
        # checking like the text key check below.
        # ...
        # Only grab the output lines if we will be processing them
        output_lines = bool(self.revision_ids)
        inv_lines = self._copy_nodes_graph(inventory_index_map,
            self.new_pack._writer, self.new_pack.inventory_index,
            readv_group_iter, total_items, output_lines=output_lines)
        if self.revision_ids:
            self._process_inventory_lines(inv_lines)
        else:
            # ...
            self._text_filter = None
        if 'pack' in debug.debug_flags:
            trace.mutter('%s: create_pack: inventories copied: %s%s %d items t+%6.3fs',
                         time.ctime(), self._pack_collection._upload_transport.base,
                         self.new_pack.random_name,
                         self.new_pack.inventory_index.key_count(),
                         time.time() - self.new_pack.start_time)

    def _update_pack_order(self, entries, index_to_pack_map):
        """Determine how we want our packs to be ordered.

    # ...
        self._update_pack_order(revision_nodes, revision_index_map)
        # copy revision keys and adjust values
        self.pb.update("Copying revision texts", 1)
        total_items, readv_group_iter = self._revision_node_readv(
            revision_nodes)
        list(self._copy_nodes_graph(revision_index_map, self.new_pack._writer,
            self.new_pack.revision_index, readv_group_iter, total_items))
        if 'pack' in debug.debug_flags:
            trace.mutter('%s: create_pack: revisions copied: %s%s %d items t+%6.3fs',
                         time.ctime(), self._pack_collection._upload_transport.base,
                         self.new_pack.random_name,
                         self.new_pack.revision_index.key_count(),
                         time.time() - self.new_pack.start_time)
        self._revision_keys = revision_keys

    def _get_text_nodes(self):
        text_index_map, text_indices = self._pack_map_and_index_list(
            'text_index')
        return text_index_map, self._index_contents(text_indices,
                                                    self._text_filter)

    def _copy_text_texts(self):
        # select text keys
        # ...
        if missing_text_keys:
            # TODO: raise a specific error that can handle many missing
            # keys.
            trace.mutter("missing keys during fetch: %r",
                         missing_text_keys)
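            # The full set of missing keys was logged above; surface one
            # representative key in the exception.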
            a_missing_key = missing_text_keys.pop()
            raise errors.RevisionNotPresent(a_missing_key[1],
                                            a_missing_key[0])
        # copy text keys and adjust values
        self.pb.update("Copying content texts", 3)
        total_items, readv_group_iter = self._least_readv_node_readv(
            text_nodes)
        list(self._copy_nodes_graph(text_index_map, self.new_pack._writer,
            self.new_pack.text_index, readv_group_iter, total_items))
        self._log_copied_texts()

    def _create_pack_from_packs(self):
        # ...
        new_pack = self.new_pack
        # buffer data - we won't be reading-back during the pack creation and
        # this makes a significant difference on sftp pushes.
        new_pack.set_write_cache_size(1024 * 1024)
        if 'pack' in debug.debug_flags:
            plain_pack_list = ['%s%s' % (a_pack.pack_transport.base, a_pack.name)
                               for a_pack in self.packs]
            if self.revision_ids is not None:
                rev_count = len(self.revision_ids)
            else:
                rev_count = 'all'
            trace.mutter('%s: create_pack: creating pack from source packs: '
                         '%s%s %s revisions wanted %s t=0',
                         time.ctime(), self._pack_collection._upload_transport.base, new_pack.random_name,
                         plain_pack_list, rev_count)
        self._copy_revision_texts()
        self._copy_inventory_texts()
        self._copy_text_texts()
        # select signature keys
        signature_filter = self._revision_keys  # same keyspace
        signature_index_map, signature_indices = self._pack_map_and_index_list(
            'signature_index')
        signature_nodes = self._index_contents(signature_indices,
                                               signature_filter)
        # copy signature keys and adjust values
        self.pb.update("Copying signature texts", 4)
        self._copy_nodes(signature_nodes, signature_index_map, new_pack._writer,
                         new_pack.signature_index)
        if 'pack' in debug.debug_flags:
            trace.mutter('%s: create_pack: revision signatures copied: %s%s %d items t+%6.3fs',
                         time.ctime(), self._pack_collection._upload_transport.base, new_pack.random_name,
                         new_pack.signature_index.key_count(),
                         time.time() - new_pack.start_time)
        new_pack._check_references()
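        # Keep the new pack only if self._use_pack() accepts it (e.g. it
        # actually received data).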
        if not self._use_pack(new_pack):
            # ...

    # ...
                request_groups[index] = []
            request_groups[index].append((key, value, references))
        for index, items in request_groups.items():
            pack_readv_requests = []
            for key, value, references in items:
                # ---- KnitGraphIndex.get_position
                bits = value[1:].split(b' ')
                offset, length = int(bits[0]), int(bits[1])
                pack_readv_requests.append(
                    ((offset, length), (key, value[0:1], references)))
            # linear scan up the pack to maximum range combining.
            pack_readv_requests.sort()
            # split out the readv and the node data.

    # ...
        # 3) bulk copy the ok data
        total_items, readv_group_iter = self._least_readv_node_readv(ok_nodes)
        list(self._copy_nodes_graph(text_index_map, self.new_pack._writer,
            self.new_pack.text_index, readv_group_iter, total_items))
        # 4) adhoc copy all the other texts.
        # We have to topologically insert all texts otherwise we can fail to
        # reconcile when parts of a single delta chain are preserved intact,
        # ...
        # space (we only topo sort the revisions, which is smaller).
        topo_order = tsort.topo_sort(ancestors)
        rev_order = dict(zip(topo_order, range(len(topo_order))))
        bad_texts.sort(key=lambda key: rev_order.get(key[0][1], 0))
        transaction = repo.get_transaction()
        file_id_index = GraphIndexPrefixAdapter(
            self.new_pack.text_index,
            ('blank', ), 1,
            add_nodes_callback=self.new_pack.text_index.add_nodes)
        data_access = _DirectPackAccess(
            {self.new_pack.text_index: self.new_pack.access_tuple()})
        data_access.set_writer(self.new_pack._writer, self.new_pack.text_index,
                               self.new_pack.access_tuple())
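        # output_texts writes straight into the pack being built: its index
        # callback and its data access are both wired to the new pack.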
        output_texts = KnitVersionedFiles(
            _KnitGraphIndex(self.new_pack.text_index,
                            add_callback=self.new_pack.text_index.add_nodes,
                            deltas=True, parents=True, is_locked=repo.is_locked),
            data_access=data_access, max_delta_chain=200)
        for key, parent_keys in bad_texts:
            # We refer to the new pack to delta data being output.
            # ...
                if parent_key[0] != key[0]:
                    # Graph parents must match the fileid
                    raise errors.BzrError('Mismatched key parent %r:%r' %
                                          (key, parent_keys))
                parents.append(parent_key[1])
            text_lines = next(repo.texts.get_record_stream(
                [key], 'unordered', True)).get_bytes_as('lines')
            output_texts.add_lines(key, parent_keys, text_lines,
                                   random_id=True, check_content=False)
        # 5) check that nothing inserted has a reference outside the keyspace.
        missing_text_keys = self.new_pack.text_index._external_references()
        if missing_text_keys:
            raise errors.BzrCheckError('Reference to missing compression parents %r'
                                       % (missing_text_keys,))
        self._log_copied_texts()

    def _use_pack(self, new_pack):
        # ...

    # ...
        for key in reversed(order):
            index, value, references = by_key[key]
            # ---- KnitGraphIndex.get_position
            bits = value[1:].split(b' ')
            offset, length = int(bits[0]), int(bits[1])
            requests.append(
                (index, [(offset, length)], [(key, value[0:1], references)]))
        # TODO: combine requests in the same index that are in ascending order.
        return total, requests