"""Versioned text file storage api."""
from __future__ import absolute_import
from copy import copy
from io import BytesIO
import struct
from zlib import adler32
from ..lazy_import import lazy_import
lazy_import(globals(), """
import itertools

from breezy import (
    annotate,
    bencode,
    errors,
    graph as _mod_graph,
    multiparent,
    osutils,
    tsort,
    urlutils,
    )
from breezy.bzr import (
    groupcompress,
    index,
    knit,
    )
""")
from ..registry import Registry
from ..sixish import (
    viewitems,
    viewvalues,
    zip,
    )
from ..textmerge import TextMerge


adapter_registry = Registry()
adapter_registry.register_lazy(('knit-delta-gz', 'fulltext'), 'breezy.bzr.knit',
    'DeltaPlainToFullText')
adapter_registry.register_lazy(('knit-ft-gz', 'fulltext'), 'breezy.bzr.knit',
    'FTPlainToFullText')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'knit-delta-gz'),
    'breezy.bzr.knit', 'DeltaAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'fulltext'),
    'breezy.bzr.knit', 'DeltaAnnotatedToFullText')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'knit-ft-gz'),
    'breezy.bzr.knit', 'FTAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'fulltext'),
    'breezy.bzr.knit', 'FTAnnotatedToFullText')
# adapter_registry.register_lazy(('knit-annotated-ft-gz', 'chunked'),
#     'breezy.bzr.knit', 'FTAnnotatedToChunked')
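# A hedged usage sketch (not code from this module): consumers look up an
# adapter factory by (source storage kind, target storage kind), instantiate
# it with the VersionedFiles the records come from, and re-code one record.
# ``versioned_files`` and ``record`` below are assumed to come from the
# surrounding get_record_stream()/insert_record_stream() machinery:
#
#   adapter_factory = adapter_registry.get(('knit-delta-gz', 'fulltext'))
#   adapter = adapter_factory(versioned_files)
#   fulltext = adapter.get_bytes(record)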
class ContentFactory(object):
    """Abstract interface for insertion and retrieval from a VersionedFile."""


class _MPDiffGenerator(object):
    """Pull out the functionality for generating mp_diffs."""

    def __init__(self, vf, keys):
        self.vf = vf
        # This is the order the keys were requested in
        self.ordered_keys = tuple(keys)
        # keys + their parents, what we need to compute the diffs
        self.needed_keys = ()
        # Map from key: mp_diff
        self.diffs = {}
        # Map from key: parents_needed (may have ghosts)
        self.parent_map = {}
        # Parents that aren't present
        self.ghost_parents = ()
        # Map from parent_key => number of children for this text
        self.refcounts = {}
        # Content chunks that are cached while we still need them
        self.chunks = {}
    def _find_needed_keys(self):
        """Find the set of keys we need to request.

        This includes all the original keys passed in, and the non-ghost
        parents of those keys.

        :return: (needed_keys, refcounts)
            needed_keys is the set of all texts we need to extract
            refcounts is a dict of {key: num_children} letting us know when we
                no longer need to cache a given parent text
        """
        # All the keys and their parents
        needed_keys = set(self.ordered_keys)
        parent_map = self.vf.get_parent_map(needed_keys)
        self.parent_map = parent_map
        # TODO: Should we be using a different construct here? I think this
        #       uses difference_update internally, and we expect the result to
        #       be tiny
        missing_keys = needed_keys.difference(parent_map)
        if missing_keys:
            raise errors.RevisionNotPresent(list(missing_keys)[0], self.vf)
        # Parents that might be missing. They are allowed to be ghosts, but we
        # should check for them
        refcounts = {}
        setdefault = refcounts.setdefault
        just_parents = set()
        for child_key, parent_keys in viewitems(parent_map):
            if not parent_keys:
                # parent_keys may be None if a given VersionedFile claims to
                # not support graph operations.
                continue
            just_parents.update(parent_keys)
            needed_keys.update(parent_keys)
            for p in parent_keys:
                refcounts[p] = setdefault(p, 0) + 1
        just_parents.difference_update(parent_map)
        # Remove any parents that are actually ghosts from the needed set
        self.present_parents = set(self.vf.get_parent_map(just_parents))
        self.ghost_parents = just_parents.difference(self.present_parents)
        needed_keys.difference_update(self.ghost_parents)
        self.needed_keys = needed_keys
        self.refcounts = refcounts
        return needed_keys, refcounts
    def _compute_diff(self, key, parent_lines, lines):
        """Compute a single mp_diff, and store it in self._diffs"""
        if len(parent_lines) > 0:
            # XXX: _extract_blocks is not usefully defined anywhere...
            #      It was meant to extract the left-parent diff without
            #      having to recompute it for Knit content (pack-0.92,
            #      etc). That seems to have regressed somewhere
            left_parent_blocks = self.vf._extract_blocks(key,
                parent_lines[0], lines)
        else:
            left_parent_blocks = None
        diff = multiparent.MultiParent.from_lines(lines,
            parent_lines, left_parent_blocks)
        self.diffs[key] = diff
    def _process_one_record(self, key, this_chunks):
        parent_keys = None
        if key in self.parent_map:
            # This record should be ready to diff, since we requested
            # content in 'topological' order
            parent_keys = self.parent_map.pop(key)
            # If a VersionedFile claims 'no-graph' support, then it may return
            # None for any parent request, so we replace it with an empty tuple
            if parent_keys is None:
                parent_keys = ()
            parent_lines = []
            for p in parent_keys:
                # Alternatively we could check p not in self.needed_keys, but
                # ghost_parents should be tiny versus huge
                if p in self.ghost_parents:
                    continue
                refcount = self.refcounts[p]
                if refcount == 1: # Last child reference
                    self.refcounts.pop(p)
                    parent_chunks = self.chunks.pop(p)
                else:
                    self.refcounts[p] = refcount - 1
                    parent_chunks = self.chunks[p]
                p_lines = osutils.chunks_to_lines(parent_chunks)
                # TODO: Should we cache the line form? We did the
                #       computation to get it, but storing it this way will
                #       be less memory efficient...
                parent_lines.append(p_lines)
            lines = osutils.chunks_to_lines(this_chunks)
            # Since we needed the lines, we'll go ahead and cache them this way
            this_chunks = lines
            self._compute_diff(key, parent_lines, lines)
        # Is this content required for any more children?
        if key in self.refcounts:
            self.chunks[key] = this_chunks
    def _extract_diffs(self):
        needed_keys, refcounts = self._find_needed_keys()
        for record in self.vf.get_record_stream(needed_keys,
                                                'topological', True):
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent(record.key, self.vf)
            self._process_one_record(record.key,
                                     record.get_bytes_as('chunked'))
    def compute_diffs(self):
        self._extract_diffs()
        dpop = self.diffs.pop
        return [dpop(k) for k in self.ordered_keys]
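    # A minimal usage sketch, assuming ``vf`` is any VersionedFiles that holds
    # the requested keys; diffs come back in the order the keys were given:
    #
    #   generator = _MPDiffGenerator(vf, [(b'file-id', b'rev-1')])
    #   diffs = generator.compute_diffs()  # -> [multiparent.MultiParent, ...]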


class VersionedFile(object):
    """Versioned text file storage.
    """

    def add_lines(self, version_id, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a single text on top of the versioned file."""
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Helper to do the class specific add_lines."""
        raise NotImplementedError(self.add_lines)

    def add_lines_with_ghosts(self, version_id, parents, lines,
        parent_texts=None, nostore_sha=None, random_id=False,
        check_content=True, left_matching_blocks=None):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines and returns the same.
        """
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
            parent_texts, nostore_sha, random_id, check_content,
            left_matching_blocks)

    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts,
        nostore_sha, random_id, check_content, left_matching_blocks):
        """Helper to do class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        present_parents = set(self.get_parent_map(needed_parents))
        for parent_id, lines in zip(present_parents,
                                    self._get_lf_split_line_list(present_parents)):
            mpvf.add_version(lines, parent_id, [])
        for (version, parent_ids, expected_sha1, mpdiff), lines in zip(
                records, mpvf.get_line_list(versions)):
            if len(parent_ids) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_ids[0]).num_lines()))
            else:
                left_matching_blocks = None
            try:
                _, _, version_text = self.add_lines_with_ghosts(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            except NotImplementedError:
                # The vf can't handle ghosts, so add lines normally, which will
                # (reasonably) fail if there are ghosts in the data.
                _, _, version_text = self.add_lines(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            vf_parents[version] = version_text
        sha1s = self.get_sha1s(versions)
        for version, parent_ids, expected_sha1, mpdiff in records:
            sha1 = sha1s[version]
            if sha1 != expected_sha1:
                raise errors.VersionedFileInvalidChecksum(version)


class RecordingVersionedFilesDecorator(object):
    """A minimal versioned files that records calls made on it.
    """

    def __init__(self, backing_vf):
        """Create a RecordingVersionedFilesDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        """
        self._backing_vf = backing_vf
        self.calls = []

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        self.calls.append(("add_lines", key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content))
        return self._backing_vf.add_lines(key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

    def check(self):
        self._backing_vf.check()


class HashEscapedPrefixMapper(HashPrefixMapper):
    """Combines the escaped first component of an id with a hash.

    This mapper is for use with a transport based backend.
    """

    _safe = bytearray(b"abcdefghijklmnopqrstuvwxyz0123456789-_@,.")

    def _escape(self, prefix):
        """Turn a key element into a filesystem safe string.

        This is similar to a plain urlutils.quote, except
        it uses specific safe characters, so that it doesn't
        have to translate a lot of valid file ids.
        """
        # @ does not get escaped. This is because it is a valid
        # filesystem character we use all the time, and it looks
        # a lot better than seeing %40 all the time.
        r = [(c in self._safe) and chr(c) or ('%%%02x' % c)
             for c in bytearray(prefix)]
        return ''.join(r).encode('ascii')

    def _unescape(self, basename):
        """Escaped names are easily unescaped by urlutils."""
        return urlutils.unquote(basename)
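    # For example: _escape(b'foo bar') returns b'foo%20bar', while
    # _escape(b'foo@bar.com') passes through unchanged, because '@' and '.'
    # are listed in _safe above.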


def make_versioned_files_factory(versioned_file_factory, mapper):
    """Create a ThunkedVersionedFiles factory.

    This will create a callable which when called creates a
    ThunkedVersionedFiles on a transport, using mapper to access individual
    versioned files, and versioned_file_factory to create each individual file.
    """
    def factory(transport):
        return ThunkedVersionedFiles(transport, versioned_file_factory, mapper,
            lambda: True)
    return factory


class VersionedFiles(object):
    """Storage for many versioned files.

    The keyspace is expressed via simple tuples. Any instance of VersionedFiles
    may have a different length key-size, but that size will be constant for
    all texts added to or retrieved from it. For instance, breezy uses
    instances with a key-size of 2 for storing user files in a repository, with
    the first element the fileid, and the second the version of that file.

    The use of tuples allows a single code base to support several different
    uses with only the mapping logic changing from instance to instance.

    :ivar _immediate_fallback_vfs: For subclasses that support stacking,
        this is a list of other VersionedFiles immediately underneath this
        one. They may in turn each have further fallbacks.
    """

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a text to the store.

        :param key: The key tuple of the text to add. If the last element is
            None, a CHK string will be generated during the addition.
        """
        raise NotImplementedError(self.add_lines)

    def _add_text(self, key, parents, text, nostore_sha=None, random_id=False):
        """Add a text to the store.

        This is a private function for use by CommitBuilder.

        :param key: The key tuple of the text to add. If the last element is
            None, a CHK string will be generated during the addition.
        :param parents: The parents key tuples of the text to add.
        :param text: A string containing the text to be committed.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :return: The text sha1, the number of bytes in the text, and an opaque
            representation of the inserted version which can be provided
            back to future _add_text calls in the parent_texts dictionary.
        """
        # The default implementation just thunks over to .add_lines(),
        # inefficient, but it works.
        return self.add_lines(key, parents, osutils.split_lines(text),
                              nostore_sha=nostore_sha,
                              random_id=random_id,
                              check_content=True)

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of key, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for key, parent_keys, expected_sha1, mpdiff in records:
            versions.append(key)
            mpvf.add_diff(mpdiff, key, parent_keys)
        needed_parents = set()
        for key, parent_keys, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_keys
                                  if not mpvf.has_version(p))
        # It seems likely that adding all the present parents as fulltexts can
        # easily exhaust memory.
        chunks_to_lines = osutils.chunks_to_lines
        for record in self.get_record_stream(needed_parents, 'unordered',
                                             True):
            if record.storage_kind == 'absent':
                continue
            mpvf.add_version(chunks_to_lines(record.get_bytes_as('chunked')),
                             record.key, [])
        for (key, parent_keys, expected_sha1, mpdiff), lines in zip(
                records, mpvf.get_line_list(versions)):
            if len(parent_keys) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_keys[0]).num_lines()))
            else:
                left_matching_blocks = None
            version_sha1, _, version_text = self.add_lines(key,
                parent_keys, lines, vf_parents,
                left_matching_blocks=left_matching_blocks)
            if version_sha1 != expected_sha1:
                raise errors.VersionedFileInvalidChecksum(key)
            vf_parents[key] = version_text
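    # A hedged round-trip sketch; ``source`` and ``target`` are assumed to be
    # two VersionedFiles instances with the same key size:
    #
    #   keys = [(b'file-id', b'rev-1'), (b'file-id', b'rev-2')]
    #   diffs = source.make_mpdiffs(keys)
    #   sha1s = source.get_sha1s(keys)
    #   parent_map = source.get_parent_map(keys)
    #   target.add_mpdiffs([(key, parent_map[key], sha1s[key], diff)
    #                       for key, diff in zip(keys, diffs)])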

    def make_mpdiffs(self, keys):
        """Create multiparent diffs for specified keys."""
        generator = _MPDiffGenerator(self, keys)
        return generator.compute_diffs()

    def get_annotator(self):
        return annotate.Annotator(self)

    missing_keys = index._missing_keys_from_parent_map

    def _extract_blocks(self, version_id, source, target):
        return None
    def _transitive_fallbacks(self):
        """Return the whole stack of fallback versionedfiles.

        This VersionedFiles may have a list of fallbacks, but it doesn't
        necessarily know about the whole stack going down, and it can't know
        at open time because they may change after the objects are opened.
        """
        all_fallbacks = []
        for a_vfs in self._immediate_fallback_vfs:
            all_fallbacks.append(a_vfs)
            all_fallbacks.extend(a_vfs._transitive_fallbacks())
        return all_fallbacks


class ThunkedVersionedFiles(VersionedFiles):
    """Storage for many versioned files thunked onto a 'VersionedFile' class.
    """

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """See VersionedFiles.add_lines()."""
        path = self._mapper.map(key)
        version_id = key[-1]
        parents = [parent[-1] for parent in parents]
        vf = self._get_vf(path)
        try:
            try:
                return vf.add_lines_with_ghosts(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)
            except NotImplementedError:
                return vf.add_lines(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)
        except errors.NoSuchFile:
            # parent directory may be missing, try again.
            self._transport.mkdir(osutils.dirname(path))
            try:
                return vf.add_lines_with_ghosts(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)
            except NotImplementedError:
                return vf.add_lines(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)

    def annotate(self, key):
        """Return a list of (version-key, line) tuples for the text of key.

        :raise RevisionNotPresent: If the key is not present.
        """
        prefix = key[:-1]
        path = self._mapper.map(prefix)
        vf = self._get_vf(path)
        origins = vf.annotate(key[-1])
        result = []
        for origin, line in origins:
            result.append((prefix + (origin,), line))
        return result


class VersionedFilesWithFallbacks(VersionedFiles):

    def without_fallbacks(self):
        """Return a clone of this object without any fallbacks configured."""
        raise NotImplementedError(self.without_fallbacks)

    def add_fallback_versioned_files(self, a_versioned_files):
        """Add a source of texts for texts not present in this knit.

        :param a_versioned_files: A VersionedFiles object.
        """
        raise NotImplementedError(self.add_fallback_versioned_files)

    def get_known_graph_ancestry(self, keys):
        """Get a KnownGraph instance with the ancestry of keys."""
        parent_map, missing_keys = self._index.find_ancestry(keys)
        for fallback in self._transitive_fallbacks():
            if not missing_keys:
                break
            (f_parent_map, f_missing_keys) = fallback._index.find_ancestry(
                missing_keys)
            parent_map.update(f_parent_map)
            missing_keys = f_missing_keys
        kg = _mod_graph.KnownGraph(parent_map)
        return kg


class _PlanMergeVersionedFile(VersionedFiles):
    """A VersionedFile for uncommitted and committed texts.

    It is intended to allow merges to be planned with working tree texts.
    """

    def __init__(self, file_id):
        """Create a _PlanMergeVersionedFile."""
        self._file_id = file_id
        # fallback locations
        self.fallback_versionedfiles = []
        # Parents for locally held keys.
        self._parents = {}
        # line data for locally held keys.
        self._lines = {}
        # key lookup providers
        self._providers = [_mod_graph.DictParentsProvider(self._parents)]

    def plan_merge(self, ver_a, ver_b, base=None):
        """See VersionedFile.plan_merge"""
        from ..merge import _PlanMerge
        if base is None:
            return _PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge()
        old_plan = list(_PlanMerge(ver_a, base, self,
                                   (self._file_id,)).plan_merge())
        new_plan = list(_PlanMerge(ver_a, ver_b, self,
                                   (self._file_id,)).plan_merge())
        return _PlanMerge._subtract_plans(old_plan, new_plan)
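    # The plan is an iterable of (state, line) pairs, where state is a string
    # such as 'unchanged', 'new-a', 'new-b' or 'killed-a'.  A hedged sketch,
    # assuming both versions were added to this object via add_lines():
    #
    #   for state, line in planner.plan_merge(b'rev-a', b'rev-b'):
    #       ...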

    def plan_lca_merge(self, ver_a, ver_b, base=None):
        from ..merge import _PlanLCAMerge
        graph = _mod_graph.Graph(self)
        new_plan = _PlanLCAMerge(
            ver_a, ver_b, self, (self._file_id,), graph).plan_merge()
        if base is None:
            return new_plan
        old_plan = _PlanLCAMerge(
            ver_a, base, self, (self._file_id,), graph).plan_merge()
        return _PlanLCAMerge._subtract_plans(list(old_plan), list(new_plan))

    def add_lines(self, key, parents, lines):
        """See VersionedFiles.add_lines."""


class NoDupeAddLinesDecorator(object):
    """Decorator for a VersionedFiles that skips doing an add_lines if the key
    is already present.
    """

    def __init__(self, store):
        self._store = store

    def add_lines(self, key, parents, lines, parent_texts=None,
            left_matching_blocks=None, nostore_sha=None, random_id=False,
            check_content=True):
        """See VersionedFiles.add_lines.

        This implementation may return None as the third element of the return
        value when the original store wouldn't.
        """
        if nostore_sha:
            raise NotImplementedError(
                "NoDupeAddLinesDecorator.add_lines does not implement the "
                "nostore_sha behaviour.")
        if key[-1] is None:
            sha1 = osutils.sha_strings(lines)
            key = (b"sha1:" + sha1,)
        else:
            sha1 = None
        if key in self._store.get_parent_map([key]):
            # This key has already been inserted, so don't do it again.
            if sha1 is None:
                sha1 = osutils.sha_strings(lines)
            return sha1, sum(map(len, lines)), None
        return self._store.add_lines(key, parents, lines,
            parent_texts=parent_texts,
            left_matching_blocks=left_matching_blocks,
            nostore_sha=nostore_sha, random_id=random_id,
            check_content=check_content)

    def __getattr__(self, name):
        return getattr(self._store, name)
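# A hedged usage sketch; ``store`` is assumed to be any VersionedFiles:
#
#   dedup = NoDupeAddLinesDecorator(store)
#   sha1, length, _ = dedup.add_lines((None,), (), [b'some text\n'])
#   # A second call with identical content computes the same sha1-based key,
#   # finds it in get_parent_map(), and returns without adding anything.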

def network_bytes_to_kind_and_offset(network_bytes):
    """Strip off a record kind from the front of network_bytes.

    :param network_bytes: The bytes of a record.
    :return: A tuple (storage_kind, offset_of_remaining_bytes)
    """
    line_end = network_bytes.find(b'\n')
    storage_kind = network_bytes[:line_end].decode('ascii')
    return storage_kind, line_end + 1
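# For example, a record serialised as b'fulltext\n...' yields
# ('fulltext', 9): the storage kind, plus the offset just past the newline.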


class NetworkRecordStream(object):
    """A record stream which reconstitutes a serialised stream."""

    def read(self):
        """Read the stream.

        :return: An iterator as per VersionedFiles.get_record_stream().
        """
        for bytes in self._bytes_iterator:
            storage_kind, line_end = network_bytes_to_kind_and_offset(bytes)
            for record in self._kind_factory[storage_kind](
                    storage_kind, bytes, line_end):
                yield record

def fulltext_network_to_record(kind, bytes, line_end):
    """Convert a network fulltext record to record."""
    meta_len, = struct.unpack('!L', bytes[line_end:line_end + 4])
    record_meta = bytes[line_end + 4:line_end + 4 + meta_len]
    key, parents = bencode.bdecode_as_tuple(record_meta)
    if parents == b'nil':
        parents = None
    fulltext = bytes[line_end + 4 + meta_len:]
    return [FulltextContentFactory(key, parents, None, fulltext)]
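# The fulltext wire format parsed above is, informally:
#
#   b'fulltext\n' + struct.pack('!L', len(meta)) + meta + text
#
# where ``meta`` is the bencoded (key, parents) tuple and the literal b'nil'
# stands in for "parent information not available".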
    for prefix in sorted(per_prefix_map):
        present_keys.extend(reversed(tsort.topo_sort(per_prefix_map[prefix])))
    return present_keys
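# groupcompress order is, informally, keys grouped by prefix (file id), each
# group emitted in reverse topological order; keeping related texts adjacent
# gives the delta compressor better matches.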


class _KeyRefs(object):

    def __init__(self, track_new_keys=False):
        # dict mapping 'key' to 'set of keys referring to that key'
        self.refs = {}
        if track_new_keys:
            # set remembering all new keys
            self.new_keys = set()
        else:
            self.new_keys = None

    def clear(self):
        self.refs.clear()
        if self.new_keys is not None:
            self.new_keys.clear()

    def add_references(self, key, refs):
        # Record the new references
        for referenced in refs:
            try:
                needed_by = self.refs[referenced]
            except KeyError:
                needed_by = self.refs[referenced] = set()
            needed_by.add(key)
        # Discard references satisfied by the new key
        self.add_key(key)

    def get_new_keys(self):
        return self.new_keys

    def get_unsatisfied_refs(self):
        return self.refs.keys()

    def _satisfy_refs_for_key(self, key):
        try:
            del self.refs[key]
        except KeyError:
            # No keys depended on this key. That's ok.
            pass

    def add_key(self, key):
        # satisfy refs for key, and remember that we've seen this key.
        self._satisfy_refs_for_key(key)
        if self.new_keys is not None:
            self.new_keys.add(key)

    def satisfy_refs_for_keys(self, keys):
        for key in keys:
            self._satisfy_refs_for_key(key)

    def get_referrers(self):
        return set(itertools.chain.from_iterable(viewvalues(self.refs)))
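# A hedged usage sketch: tracking which referenced keys are still outstanding
# while records stream in.
#
#   refs = _KeyRefs()
#   refs.add_references((b'rev-2',), [(b'rev-1',)])  # rev-2 refers to rev-1
#   list(refs.get_unsatisfied_refs())   # -> [(b'rev-1',)]
#   refs.add_key((b'rev-1',))           # rev-1 arrives; reference satisfied
#   list(refs.get_unsatisfied_refs())   # -> []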