# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Versioned text file storage api."""

from copy import copy
from cStringIO import StringIO
import os
import struct
from zlib import adler32

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
import urllib

from bzrlib import (
    annotate,
    errors,
    groupcompress,
    index,
    knit,
    multiparent,
    osutils,
    revision,
    tsort,
    )
from bzrlib.graph import DictParentsProvider, Graph, StackedParentsProvider
from bzrlib.transport.memory import MemoryTransport
""")
from bzrlib.inter import InterObject
from bzrlib.registry import Registry
from bzrlib.symbol_versioning import *
from bzrlib.textmerge import TextMerge
from bzrlib import bencode


adapter_registry = Registry()
adapter_registry.register_lazy(('knit-delta-gz', 'fulltext'), 'bzrlib.knit',
    'DeltaPlainToFullText')
adapter_registry.register_lazy(('knit-ft-gz', 'fulltext'), 'bzrlib.knit',
    'FTPlainToFullText')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'knit-delta-gz'),
    'bzrlib.knit', 'DeltaAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'fulltext'),
    'bzrlib.knit', 'DeltaAnnotatedToFullText')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'knit-ft-gz'),
    'bzrlib.knit', 'FTAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'fulltext'),
    'bzrlib.knit', 'FTAnnotatedToFullText')
# adapter_registry.register_lazy(('knit-annotated-ft-gz', 'chunked'),
#     'bzrlib.knit', 'FTAnnotatedToChunked')

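# Illustrative sketch (not part of the original module): callers typically
# resolve an adapter class for a (source storage kind, target storage kind)
# pair and use it to convert a record's bytes. The constructor argument and
# the get_bytes() call shown here are assumptions about the knit adapter API,
# not something this file guarantees:
#
#   adapter_cls = adapter_registry.get(('knit-annotated-ft-gz', 'fulltext'))
#   adapter = adapter_cls(None)
#   fulltext = adapter.get_bytes(record)
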
class ContentFactory(object):
    """Abstract interface for insertion and retrieval from a VersionedFile.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. One of
        'mpdiff', 'knit-annotated-ft', 'knit-annotated-delta', 'knit-ft',
        'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
        'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self):
        """Create a ContentFactory."""
        self.sha1 = None
        self.storage_kind = None
        self.key = None
        self.parents = None


class ChunkedContentFactory(ContentFactory):
    """Static data content factory.

    This takes a 'chunked' list of strings. The only requirement on 'chunked' is
    that ''.join(lines) becomes a valid fulltext. A tuple of a single string
    satisfies this, as does a list of lines.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. Always
        'chunked'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self, key, parents, sha1, chunks):
        """Create a ContentFactory."""
        self.sha1 = sha1
        self.storage_kind = 'chunked'
        self.key = key
        self.parents = parents
        self._chunks = chunks

    def get_bytes_as(self, storage_kind):
        if storage_kind == 'chunked':
            return self._chunks
        elif storage_kind == 'fulltext':
            return ''.join(self._chunks)
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)


class FulltextContentFactory(ContentFactory):
    """Static data content factory.

    This takes a fulltext when created and just returns that during
    get_bytes_as('fulltext').

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. Always
        'fulltext'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self, key, parents, sha1, text):
        """Create a ContentFactory."""
        self.sha1 = sha1
        self.storage_kind = 'fulltext'
        self.key = key
        self.parents = parents
        self._text = text

    def get_bytes_as(self, storage_kind):
        if storage_kind == self.storage_kind:
            return self._text
        elif storage_kind == 'chunked':
            return [self._text]
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)


class AbsentContentFactory(ContentFactory):
    """A placeholder content factory for unavailable texts.

    :ivar sha1: None.
    :ivar storage_kind: 'absent'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: None.
    """

    def __init__(self, key):
        """Create a ContentFactory."""
        self.sha1 = None
        self.storage_kind = 'absent'
        self.key = key
        self.parents = None

    def get_bytes_as(self, storage_kind):
        raise ValueError('A request was made for key: %s, but that'
                         ' content is not available, and the calling'
                         ' code does not handle if it is missing.'
                         % (self.key,))


class AdapterFactory(ContentFactory):
    """A content factory to adapt between key prefixes."""

    def __init__(self, key, parents, adapted):
        """Create an adapter factory instance."""
        self.key = key
        self.parents = parents
        self._adapted = adapted

    def __getattr__(self, attr):
        """Return a member from the adapted object."""
        if attr in ('key', 'parents'):
            return self.__dict__[attr]
        else:
            return getattr(self._adapted, attr)


def filter_absent(record_stream):
    """Adapt a record stream to remove absent records."""
    for record in record_stream:
        if record.storage_kind != 'absent':
            yield record

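# Hedged usage sketch (the vf and keys names are assumed, not from this file):
# filter_absent() is typically wrapped around a get_record_stream() result when
# absent records should simply be skipped rather than handled:
#
#   for record in filter_absent(vf.get_record_stream(keys, 'unordered', True)):
#       text = record.get_bytes_as('fulltext')
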
class VersionedFile(object):
    """Versioned text file storage.

    A versioned file manages versions of line-based text files,
    keeping track of the originating version for each line.

    To clients the "lines" of the file are represented as a list of
    strings. These strings will typically have terminal newline
    characters, but this is not required. In particular files commonly
    do not have a newline at the end of the file.

    Texts are identified by a version-id string.
    """

    @staticmethod
    def check_not_reserved_id(version_id):
        revision.check_not_reserved_id(version_id)

    def copy_to(self, name, transport):
        """Copy this versioned file to name on transport."""
        raise NotImplementedError(self.copy_to)

    def get_record_stream(self, versions, ordering, include_delta_closure):
        """Get a stream of records for versions.

        :param versions: The versions to include. Each version is a tuple
            (version,).
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the data content of the
            stream, not in the emitted records). This guarantees that
            'fulltext' can be used successfully on every record.
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        raise NotImplementedError(self.get_record_stream)

    def has_version(self, version_id):
        """Returns whether version is present."""
        raise NotImplementedError(self.has_version)

    def insert_record_stream(self, stream):
        """Insert a record stream into this versioned file.

        :param stream: A stream of records to insert.
        :seealso VersionedFile.get_record_stream:
        """
        raise NotImplementedError

    def add_lines(self, version_id, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a single text on top of the versioned file.

        Must raise RevisionAlreadyPresent if the new version is
        already present in file history.

        Must raise RevisionNotPresent if any of the given parents are
        not present in file history.

        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminating \n. If the lines list does not meet this constraint the
            add routine may error or may succeed - but you will be unable to
            read the data back accurately. (Checking the lines have been split
            correctly is expensive and extremely unlikely to catch bugs so it
            is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations. VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent. The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
            representation of the inserted version which can be provided
            back to future add_lines calls in the parent_texts dictionary.
        """
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

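    # Illustrative sketch (revision ids and lines are assumed values, not from
    # the original file): a typical caller threads the opaque third return
    # value back in via parent_texts so the backend can delta against it:
    #
    #   sha1, length, parent_text = vf.add_lines('rev-1', [], lines_1)
    #   vf.add_lines('rev-2', ['rev-1'], lines_2,
    #                parent_texts={'rev-1': parent_text})
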
    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Helper to do the class specific add_lines."""
        raise NotImplementedError(self.add_lines)

    def add_lines_with_ghosts(self, version_id, parents, lines,
        parent_texts=None, nostore_sha=None, random_id=False,
        check_content=True, left_matching_blocks=None):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines and returns the same.
        """
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
            parent_texts, nostore_sha, random_id, check_content, left_matching_blocks)

    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts,
        nostore_sha, random_id, check_content, left_matching_blocks):
        """Helper to do class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)

    def check(self, progress_bar=None):
        """Check the versioned file for integrity."""
        raise NotImplementedError(self.check)

    def _check_lines_not_unicode(self, lines):
        """Check that lines being added to a versioned file are not unicode."""
        for line in lines:
            if line.__class__ is not str:
                raise errors.BzrBadParameterUnicode("lines")

    def _check_lines_are_lines(self, lines):
        """Check that the lines really are full lines without inline EOL."""
        for line in lines:
            if '\n' in line[:-1]:
                raise errors.BzrBadParameterContainsNewline("lines")

    def get_format_signature(self):
        """Get a text description of the data encoding in this file.

        :since: 0.90
        """
        raise NotImplementedError(self.get_format_signature)

    def make_mpdiffs(self, version_ids):
        """Create multiparent diffs for specified versions."""
        knit_versions = set()
        knit_versions.update(version_ids)
        parent_map = self.get_parent_map(version_ids)
        for version_id in version_ids:
            try:
                knit_versions.update(parent_map[version_id])
            except KeyError:
                raise errors.RevisionNotPresent(version_id, self)
        # We need to filter out ghosts, because we can't diff against them.
        knit_versions = set(self.get_parent_map(knit_versions).keys())
        lines = dict(zip(knit_versions,
            self._get_lf_split_line_list(knit_versions)))
        diffs = []
        for version_id in version_ids:
            target = lines[version_id]
            try:
                parents = [lines[p] for p in parent_map[version_id] if p in
                    knit_versions]
            except KeyError:
                # I don't know how this could ever trigger.
                # parent_map[version_id] was already triggered in the previous
                # for loop, and lines[p] has the 'if p in knit_versions' check,
                # so we again won't have a KeyError.
                raise errors.RevisionNotPresent(version_id, self)
            if len(parents) > 0:
                left_parent_blocks = self._extract_blocks(version_id,
                    parents[0], target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target, parents,
                left_parent_blocks))
        return diffs

    def _extract_blocks(self, version_id, source, target):
        return None

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        # Does this need to call self._check_write_ok()? (IanC 20070919)
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        present_parents = set(self.get_parent_map(needed_parents).keys())
        for parent_id, lines in zip(present_parents,
                                    self._get_lf_split_line_list(present_parents)):
            mpvf.add_version(lines, parent_id, [])
        for (version, parent_ids, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_ids) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_ids[0]).num_lines()))
            else:
                left_matching_blocks = None
            try:
                _, _, version_text = self.add_lines_with_ghosts(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            except NotImplementedError:
                # The vf can't handle ghosts, so add lines normally, which will
                # (reasonably) fail if there are ghosts in the data.
                _, _, version_text = self.add_lines(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            vf_parents[version] = version_text
        sha1s = self.get_sha1s(versions)
        for version, parent_ids, expected_sha1, mpdiff in records:
            if expected_sha1 != sha1s[version]:
                raise errors.VersionedFileInvalidChecksum(version)

    def get_text(self, version_id):
        """Return version contents as a text string.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return ''.join(self.get_lines(version_id))
    get_string = get_text

    def get_texts(self, version_ids):
        """Return the texts of listed versions as a list of strings.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return [''.join(self.get_lines(v)) for v in version_ids]

    def get_lines(self, version_id):
        """Return version contents as a sequence of lines.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        raise NotImplementedError(self.get_lines)

    def _get_lf_split_line_list(self, version_ids):
        return [StringIO(t).readlines() for t in self.get_texts(version_ids)]

    def get_ancestry(self, version_ids, topo_sorted=True):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        This list will not be topologically sorted if topo_sorted=False is
        passed.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history."""
        if isinstance(version_ids, basestring):
            version_ids = [version_ids]
        raise NotImplementedError(self.get_ancestry)

    def get_ancestry_with_ghosts(self, version_ids):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history.

        Ghosts that are known about will be included in ancestry list,
        but are not explicitly marked.
        """
        raise NotImplementedError(self.get_ancestry_with_ghosts)

    def get_parent_map(self, version_ids):
        """Get a map of the parents of version_ids.

        :param version_ids: The version ids to look up parents for.
        :return: A mapping from version id to parents.
        """
        raise NotImplementedError(self.get_parent_map)

    def get_parents_with_ghosts(self, version_id):
        """Return version names for parents of version_id.

        Will raise RevisionNotPresent if version_id is not present
        in the history.

        Ghosts that are known about will be included in the parent list,
        but are not explicitly marked.
        """
        try:
            return list(self.get_parent_map([version_id])[version_id])
        except KeyError:
            raise errors.RevisionNotPresent(version_id, self)

    def annotate(self, version_id):
        """Return a list of (version-id, line) tuples for version_id.

        :raise RevisionNotPresent: If the given version is
        not present in file history.
        """
        raise NotImplementedError(self.annotate)

    def iter_lines_added_or_present_in_versions(self, version_ids=None,
                                                pb=None):
        """Iterate over the lines in the versioned file from version_ids.

        This may return lines from other versions. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES: Lines are normalised: they will all have \n terminators.
               Lines are returned in arbitrary order.

        :return: An iterator over (line, version_id).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_versions)

    def plan_merge(self, ver_a, ver_b):
        """Return pseudo-annotation indicating how the two versions merge.

        This is computed between versions a and b and their common
        base.

        Weave lines present in none of them are skipped entirely.

        Legend:
        killed-base Dead in base revision
        killed-both Killed in each revision
        killed-a    Killed in a
        killed-b    Killed in b
        unchanged   Alive in both a and b (possibly created in both)
        new-a       Created in a
        new-b       Created in b
        ghost-a     Killed in a, unborn in b
        ghost-b     Killed in b, unborn in a
        irrelevant  Not in either revision
        """
        raise NotImplementedError(VersionedFile.plan_merge)

    def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
                    b_marker=TextMerge.B_MARKER):
        return PlanWeaveMerge(plan, a_marker, b_marker).merge_lines()[0]

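# Hedged example (revision names are assumed): plan_merge() produces the
# (state, line) tuples described above, and weave_merge() turns that plan into
# merged lines with conflict markers:
#
#   plan = vf.plan_merge('this-revision', 'other-revision')
#   merged_lines = vf.weave_merge(plan)

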
class RecordingVersionedFilesDecorator(object):
    """A minimal versioned files object that records calls made on it.

    Only enough methods have been added to support tests using it to date.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf):
        """Create a RecordingVersionedFilesDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        """
        self._backing_vf = backing_vf
        self.calls = []

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        self.calls.append(("add_lines", key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content))
        return self._backing_vf.add_lines(key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

    def check(self):
        self._backing_vf.check()

    def get_parent_map(self, keys):
        self.calls.append(("get_parent_map", copy(keys)))
        return self._backing_vf.get_parent_map(keys)

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        self.calls.append(("get_record_stream", list(keys), sort_order,
            include_delta_closure))
        return self._backing_vf.get_record_stream(keys, sort_order,
            include_delta_closure)

    def get_sha1s(self, keys):
        self.calls.append(("get_sha1s", copy(keys)))
        return self._backing_vf.get_sha1s(keys)

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        self.calls.append(("iter_lines_added_or_present_in_keys", copy(keys)))
        return self._backing_vf.iter_lines_added_or_present_in_keys(keys, pb=pb)

    def keys(self):
        self.calls.append(("keys",))
        return self._backing_vf.keys()


class OrderingVersionedFilesDecorator(RecordingVersionedFilesDecorator):
    """A VF that records calls, and returns keys in specific order.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf, key_priority):
        """Create a RecordingVersionedFilesDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        :param key_priority: A dictionary defining what order keys should be
            returned from an 'unordered' get_record_stream request.
            Keys with lower priority are returned first, keys not present in
            the map get an implicit priority of 0, and are returned in
            lexicographical order.
        """
        RecordingVersionedFilesDecorator.__init__(self, backing_vf)
        self._key_priority = key_priority

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        self.calls.append(("get_record_stream", list(keys), sort_order,
            include_delta_closure))
        if sort_order == 'unordered':
            def sort_key(key):
                return (self._key_priority.get(key, 0), key)
            # Use a defined order by asking for the keys one-by-one from the
            # backing_vf.
            for key in sorted(keys, key=sort_key):
                for record in self._backing_vf.get_record_stream([key],
                        'unordered', include_delta_closure):
                    yield record
        else:
            for record in self._backing_vf.get_record_stream(keys, sort_order,
                    include_delta_closure):
                yield record


class KeyMapper(object):
    """KeyMappers map between keys and underlying partitioned storage."""

    def map(self, key):
        """Map key to an underlying storage identifier.

        :param key: A key tuple e.g. ('file-id', 'revision-id').
        :return: An underlying storage identifier, specific to the partitioning
            mechanism.
        """
        raise NotImplementedError(self.map)

    def unmap(self, partition_id):
        """Map a partitioned storage id back to a key prefix.

        :param partition_id: The underlying partition id.
        :return: As much of a key (or prefix) as is derivable from the partition
            id.
        """
        raise NotImplementedError(self.unmap)


class ConstantMapper(KeyMapper):
    """A key mapper that maps to a constant result."""

    def __init__(self, result):
        """Create a ConstantMapper which will return result for all maps."""
        self._result = result

    def map(self, key):
        """See KeyMapper.map()."""
        return self._result


class URLEscapeMapper(KeyMapper):
    """Base class for use with transport backed storage.

    This provides a map and unmap wrapper that respectively url escape and
    unescape their outputs and inputs.
    """

    def map(self, key):
        """See KeyMapper.map()."""
        return urllib.quote(self._map(key))

    def unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return self._unmap(urllib.unquote(partition_id))


class PrefixMapper(URLEscapeMapper):
    """A key mapper that extracts the first component of a key.

    This mapper is for use with a transport based backend.
    """

    def _map(self, key):
        """See KeyMapper.map()."""
        return key[0]

    def _unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return (partition_id,)


class HashPrefixMapper(URLEscapeMapper):
    """A key mapper that combines the first component of a key with a hash.

    This mapper is for use with a transport based backend.
    """

    def _map(self, key):
        """See KeyMapper.map()."""
        prefix = self._escape(key[0])
        return "%02x/%s" % (adler32(prefix) & 0xff, prefix)

    def _escape(self, prefix):
        """No escaping needed here."""
        return prefix

    def _unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return (self._unescape(osutils.basename(partition_id)),)

    def _unescape(self, basename):
        """No unescaping needed for HashPrefixMapper."""
        return basename

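# Sketch of the mapping behaviour (the example key is assumed, not from this
# file): PrefixMapper maps ('file-id', 'rev') to 'file-id', while
# HashPrefixMapper prepends an adler32-derived bucket:
#
#   HashPrefixMapper().map(('file-id', 'rev'))   # -> '<xx>/file-id'
#
# where <xx> is the low byte of adler32('file-id') rendered in hex.

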
class HashEscapedPrefixMapper(HashPrefixMapper):
    """Combines the escaped first component of a key with a hash.

    This mapper is for use with a transport based backend.
    """

    _safe = "abcdefghijklmnopqrstuvwxyz0123456789-_@,."

    def _escape(self, prefix):
        """Turn a key element into a filesystem safe string.

        This is similar to a plain urllib.quote, except
        it uses specific safe characters, so that it doesn't
        have to translate a lot of valid file ids.
        """
        # @ does not get escaped. This is because it is a valid
        # filesystem character we use all the time, and it looks
        # a lot better than seeing %40 all the time.
        r = [((c in self._safe) and c or ('%%%02x' % ord(c)))
             for c in prefix]
        return ''.join(r)

    def _unescape(self, basename):
        """Escaped names are easily unescaped by urlutils."""
        return urllib.unquote(basename)


def make_versioned_files_factory(versioned_file_factory, mapper):
    """Create a ThunkedVersionedFiles factory.

    This will create a callable which when called creates a
    ThunkedVersionedFiles on a transport, using mapper to access individual
    versioned files, and versioned_file_factory to create each individual file.
    """
    def factory(transport):
        return ThunkedVersionedFiles(transport, versioned_file_factory, mapper,
            lambda:True)
    return factory

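# Hedged usage sketch (weave_file_factory is an assumed stand-in for a real
# VersionedFile factory, not something defined here): the returned callable
# builds a keyspace-aware store over per-prefix VersionedFile objects:
#
#   factory = make_versioned_files_factory(weave_file_factory, PrefixMapper())
#   versioned_files = factory(transport)

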
class VersionedFiles(object):
    """Storage for many versioned files.

    This object allows a single keyspace for accessing the history graph and
    contents of named bytestrings.

    Currently no implementation allows the graph of different key prefixes to
    intersect, but the API does allow such implementations in the future.

    The keyspace is expressed via simple tuples. Any instance of VersionedFiles
    may have a different length key-size, but that size will be constant for
    all texts added to or retrieved from it. For instance, bzrlib uses
    instances with a key-size of 2 for storing user files in a repository, with
    the first element the fileid, and the second the version of that file.

    The use of tuples allows a single code base to support several different
    uses with only the mapping logic changing from instance to instance.
    """

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a text to the store.

        :param key: The key tuple of the text to add. If the last element is
            None, a CHK string will be generated during the addition.
        :param parents: The parents key tuples of the text to add.
        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminating \n. If the lines list does not meet this constraint the
            add routine may error or may succeed - but you will be unable to
            read the data back accurately. (Checking the lines have been split
            correctly is expensive and extremely unlikely to catch bugs so it
            is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations. VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent. The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
            representation of the inserted version which can be provided
            back to future add_lines calls in the parent_texts dictionary.
        """
        raise NotImplementedError(self.add_lines)

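    # Illustrative sketch (the key values are assumed): keys are tuples of a
    # fixed size per instance, e.g. (file_id, revision_id) for user texts:
    #
    #   texts.add_lines(('file-id', 'rev-1'), (), lines_1)
    #   texts.add_lines(('file-id', 'rev-2'), [('file-id', 'rev-1')], lines_2)
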
    def _add_text(self, key, parents, text, nostore_sha=None, random_id=False):
        """Add a text to the store.

        This is a private function for use by CommitBuilder.

        :param key: The key tuple of the text to add. If the last element is
            None, a CHK string will be generated during the addition.
        :param parents: The parents key tuples of the text to add.
        :param text: A string containing the text to be committed.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
            representation of the inserted version which can be provided
            back to future _add_text calls in the parent_texts dictionary.
        """
        # The default implementation just thunks over to .add_lines(),
        # inefficient, but it works.
        return self.add_lines(key, parents, osutils.split_lines(text),
                              nostore_sha=nostore_sha,
                              random_id=random_id,
                              check_content=True)

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        # It seems likely that adding all the present parents as fulltexts can
        # easily exhaust memory.
        chunks_to_lines = osutils.chunks_to_lines
        for record in self.get_record_stream(needed_parents, 'unordered',
                                             True):
            if record.storage_kind == 'absent':
                continue
            mpvf.add_version(chunks_to_lines(record.get_bytes_as('chunked')),
                             record.key, [])
        for (key, parent_keys, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_keys) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_keys[0]).num_lines()))
            else:
                left_matching_blocks = None
            version_sha1, _, version_text = self.add_lines(key,
                parent_keys, lines, vf_parents,
                left_matching_blocks=left_matching_blocks)
            if version_sha1 != expected_sha1:
                raise errors.VersionedFileInvalidChecksum(version)
            vf_parents[key] = version_text

    def annotate(self, key):
        """Return a list of (version-key, line) tuples for the text of key.

        :raise RevisionNotPresent: If the key is not present.
        """
        raise NotImplementedError(self.annotate)

    def check(self, progress_bar=None):
        """Check this object for integrity."""
        raise NotImplementedError(self.check)

    @staticmethod
    def check_not_reserved_id(version_id):
        revision.check_not_reserved_id(version_id)

    def _check_lines_not_unicode(self, lines):
        """Check that lines being added to a versioned file are not unicode."""
        for line in lines:
            if line.__class__ is not str:
                raise errors.BzrBadParameterUnicode("lines")

    def _check_lines_are_lines(self, lines):
        """Check that the lines really are full lines without inline EOL."""
        for line in lines:
            if '\n' in line[:-1]:
                raise errors.BzrBadParameterContainsNewline("lines")

    def get_parent_map(self, keys):
        """Get a map of the parents of keys.

        :param keys: The keys to look up parents for.
        :return: A mapping from keys to parents. Absent keys are absent from
            the mapping.
        """
        raise NotImplementedError(self.get_parent_map)

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """Get a stream of records for keys.

        :param keys: The keys to include.
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the opaque data).
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        raise NotImplementedError(self.get_record_stream)

    def get_sha1s(self, keys):
        """Get the sha1's of the texts for the given keys.

        :param keys: The names of the keys to lookup
        :return: a dict from key to sha1 digest. Keys of texts which are not
            present in the store are not present in the returned
            dictionary.
        """
        raise NotImplementedError(self.get_sha1s)

    has_key = index._has_key_from_parent_map

    def get_missing_compression_parent_keys(self):
        """Return an iterable of keys of missing compression parents.

        Check this after calling insert_record_stream to find out if there are
        any missing compression parents. If there are, the records that
        depend on them are not able to be inserted safely. The precise
        behaviour depends on the concrete VersionedFiles class in use.

        Classes that do not support this will raise NotImplementedError.
        """
        raise NotImplementedError(self.get_missing_compression_parent_keys)

    def insert_record_stream(self, stream):
        """Insert a record stream into this container.

        :param stream: A stream of records to insert.
        :seealso VersionedFile.get_record_stream:
        """
        raise NotImplementedError

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """Iterate over the lines in the versioned files from keys.

        This may return lines from other keys. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES:
         * Lines are normalised by the underlying store: they will all have \n
           terminators.
         * Lines are returned in arbitrary order.

        :return: An iterator over (line, key).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_keys)

"""Return a iterable of the keys for all the contained texts."""
1018
raise NotImplementedError(self.keys)
1020
def make_mpdiffs(self, keys):
1021
"""Create multiparent diffs for specified keys."""
1022
keys_order = tuple(keys)
1023
keys = frozenset(keys)
1024
knit_keys = set(keys)
1025
parent_map = self.get_parent_map(keys)
1026
for parent_keys in parent_map.itervalues():
1028
knit_keys.update(parent_keys)
1029
missing_keys = keys - set(parent_map)
1031
raise errors.RevisionNotPresent(list(missing_keys)[0], self)
1032
# We need to filter out ghosts, because we can't diff against them.
1033
maybe_ghosts = knit_keys - keys
1034
ghosts = maybe_ghosts - set(self.get_parent_map(maybe_ghosts))
1035
knit_keys.difference_update(ghosts)
1037
chunks_to_lines = osutils.chunks_to_lines
1038
for record in self.get_record_stream(knit_keys, 'topological', True):
1039
lines[record.key] = chunks_to_lines(record.get_bytes_as('chunked'))
1040
# line_block_dict = {}
1041
# for parent, blocks in record.extract_line_blocks():
1042
# line_blocks[parent] = blocks
1043
# line_blocks[record.key] = line_block_dict
1045
for key in keys_order:
1047
parents = parent_map[key] or []
1048
# Note that filtering knit_keys can lead to a parent difference
1049
# between the creation and the application of the mpdiff.
1050
parent_lines = [lines[p] for p in parents if p in knit_keys]
1051
if len(parent_lines) > 0:
1052
left_parent_blocks = self._extract_blocks(key, parent_lines[0],
1055
left_parent_blocks = None
1056
diffs.append(multiparent.MultiParent.from_lines(target,
1057
parent_lines, left_parent_blocks))
1060
missing_keys = index._missing_keys_from_parent_map
1062
def _extract_blocks(self, version_id, source, target):
1066
class ThunkedVersionedFiles(VersionedFiles):
    """Storage for many versioned files thunked onto a 'VersionedFile' class.

    This object allows a single keyspace for accessing the history graph and
    contents of named bytestrings.

    Currently no implementation allows the graph of different key prefixes to
    intersect, but the API does allow such implementations in the future.
    """

    def __init__(self, transport, file_factory, mapper, is_locked):
        """Create a ThunkedVersionedFiles."""
        self._transport = transport
        self._file_factory = file_factory
        self._mapper = mapper
        self._is_locked = is_locked

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """See VersionedFiles.add_lines()."""
        path = self._mapper.map(key)
        version_id = key[-1]
        parents = [parent[-1] for parent in parents]
        vf = self._get_vf(path)
        try:
            try:
                return vf.add_lines_with_ghosts(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)
            except NotImplementedError:
                return vf.add_lines(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)
        except errors.NoSuchFile:
            # parent directory may be missing, try again.
            self._transport.mkdir(osutils.dirname(path))
            try:
                return vf.add_lines_with_ghosts(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)
            except NotImplementedError:
                return vf.add_lines(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)

    def annotate(self, key):
        """Return a list of (version-key, line) tuples for the text of key.

        :raise RevisionNotPresent: If the key is not present.
        """
        prefix = key[:-1]
        path = self._mapper.map(prefix)
        vf = self._get_vf(path)
        origins = vf.annotate(key[-1])
        result = []
        for origin, line in origins:
            result.append((prefix + (origin,), line))
        return result

    def get_annotator(self):
        return annotate.Annotator(self)

    def check(self, progress_bar=None):
        """See VersionedFiles.check()."""
        for prefix, vf in self._iter_all_components():
            vf.check()

    def get_parent_map(self, keys):
        """Get a map of the parents of keys.

        :param keys: The keys to look up parents for.
        :return: A mapping from keys to parents. Absent keys are absent from
            the mapping.
        """
        prefixes = self._partition_keys(keys)
        result = {}
        for prefix, suffixes in prefixes.items():
            path = self._mapper.map(prefix)
            vf = self._get_vf(path)
            parent_map = vf.get_parent_map(suffixes)
            for key, parents in parent_map.items():
                result[prefix + (key,)] = tuple(
                    prefix + (parent,) for parent in parents)
        return result

    def _get_vf(self, path):
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        return self._file_factory(path, self._transport, create=True,
            get_scope=lambda:None)

    def _partition_keys(self, keys):
        """Turn keys into a dict of prefix:suffix_list."""
        result = {}
        for key in keys:
            prefix_keys = result.setdefault(key[:-1], [])
            prefix_keys.append(key[-1])
        return result

    def _get_all_prefixes(self):
        # Identify all key prefixes.
        # XXX: A bit hacky, needs polish.
        if type(self._mapper) == ConstantMapper:
            paths = [self._mapper.map(())]
            prefixes = [()]
        else:
            relpaths = set()
            for quoted_relpath in self._transport.iter_files_recursive():
                path, ext = os.path.splitext(quoted_relpath)
                relpaths.add(path)
            paths = list(relpaths)
            prefixes = [self._mapper.unmap(path) for path in paths]
        return zip(paths, prefixes)

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream()."""
        # Ordering will be taken care of by each partitioned store; group keys
        # into source groups.
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            suffixes = [(suffix,) for suffix in suffixes]
            for record in vf.get_record_stream(suffixes, ordering,
                    include_delta_closure):
                if record.parents is not None:
                    record.parents = tuple(
                        prefix + parent for parent in record.parents)
                record.key = prefix + record.key
                yield record

    def _iter_keys_vf(self, keys):
        prefixes = self._partition_keys(keys)
        for prefix, suffixes in prefixes.items():
            path = self._mapper.map(prefix)
            vf = self._get_vf(path)
            yield prefix, suffixes, vf

    def get_sha1s(self, keys):
        """See VersionedFiles.get_sha1s()."""
        sha1s = {}
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            vf_sha1s = vf.get_sha1s(suffixes)
            for suffix, sha1 in vf_sha1s.iteritems():
                sha1s[prefix + (suffix,)] = sha1
        return sha1s

    def insert_record_stream(self, stream):
        """Insert a record stream into this container.

        :param stream: A stream of records to insert.
        :seealso VersionedFile.get_record_stream:
        """
        for record in stream:
            prefix = record.key[:-1]
            key = record.key[-1:]
            if record.parents is not None:
                parents = [parent[-1:] for parent in record.parents]
            else:
                parents = None
            thunk_record = AdapterFactory(key, parents, record)
            path = self._mapper.map(prefix)
            # Note that this parses the file many times; we can do better but
            # as this only impacts weaves in terms of performance, it is
            # reasonable.
            vf = self._get_vf(path)
            vf.insert_record_stream([thunk_record])

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """Iterate over the lines in the versioned files from keys.

        This may return lines from other keys. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES:
         * Lines are normalised by the underlying store: they will all have \n
           terminators.
         * Lines are returned in arbitrary order.

        :return: An iterator over (line, key).
        """
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            for line, version in vf.iter_lines_added_or_present_in_versions(suffixes):
                yield line, prefix + (version,)

    def _iter_all_components(self):
        for path, prefix in self._get_all_prefixes():
            yield prefix, self._get_vf(path)

    def keys(self):
        """See VersionedFiles.keys()."""
        result = set()
        for prefix, vf in self._iter_all_components():
            for suffix in vf.versions():
                result.add(prefix + (suffix,))
        return result


class _PlanMergeVersionedFile(VersionedFiles):
    """A VersionedFile for uncommitted and committed texts.

    It is intended to allow merges to be planned with working tree texts.
    It implements only the small part of the VersionedFiles interface used by
    PlanMerge. It falls back to multiple versionedfiles for data not stored in
    _PlanMergeVersionedFile itself.

    :ivar: fallback_versionedfiles a list of VersionedFiles objects that can be
        queried for missing texts.
    """

    def __init__(self, file_id):
        """Create a _PlanMergeVersionedFile.

        :param file_id: Used with _PlanMerge code which is not yet fully
            tuple-keyspace aware.
        """
        self._file_id = file_id
        # fallback locations
        self.fallback_versionedfiles = []
        # Parents for locally held keys.
        self._parents = {}
        # line data for locally held keys.
        self._lines = {}
        # key lookup providers
        self._providers = [DictParentsProvider(self._parents)]

    def plan_merge(self, ver_a, ver_b, base=None):
        """See VersionedFile.plan_merge"""
        from bzrlib.merge import _PlanMerge
        if base is None:
            return _PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge()
        old_plan = list(_PlanMerge(ver_a, base, self, (self._file_id,)).plan_merge())
        new_plan = list(_PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge())
        return _PlanMerge._subtract_plans(old_plan, new_plan)

    def plan_lca_merge(self, ver_a, ver_b, base=None):
        from bzrlib.merge import _PlanLCAMerge
        graph = Graph(self)
        new_plan = _PlanLCAMerge(ver_a, ver_b, self, (self._file_id,), graph).plan_merge()
        if base is None:
            return new_plan
        old_plan = _PlanLCAMerge(ver_a, base, self, (self._file_id,), graph).plan_merge()
        return _PlanLCAMerge._subtract_plans(list(old_plan), list(new_plan))

    def add_lines(self, key, parents, lines):
        """See VersionedFiles.add_lines

        Lines are added locally, not to fallback versionedfiles. Also, ghosts
        are permitted. Only reserved ids are permitted.
        """
        if type(key) is not tuple:
            raise TypeError(key)
        if not revision.is_reserved_id(key[-1]):
            raise ValueError('Only reserved ids may be used')
        if parents is None:
            raise ValueError('Parents may not be None')
        if lines is None:
            raise ValueError('Lines may not be None')
        self._parents[key] = tuple(parents)
        self._lines[key] = lines

    def get_record_stream(self, keys, ordering, include_delta_closure):
        pending = set(keys)
        for key in keys:
            if key in self._lines:
                lines = self._lines[key]
                parents = self._parents[key]
                pending.remove(key)
                yield ChunkedContentFactory(key, parents, None, lines)
        for versionedfile in self.fallback_versionedfiles:
            for record in versionedfile.get_record_stream(
                    pending, 'unordered', True):
                if record.storage_kind == 'absent':
                    continue
                else:
                    pending.remove(record.key)
                    yield record
            if not pending:
                return
        # report absent entries
        for key in pending:
            yield AbsentContentFactory(key)

    def get_parent_map(self, keys):
        """See VersionedFiles.get_parent_map"""
        # We create a new provider because a fallback may have been added.
        # If we make fallbacks private we can update a stack list and avoid
        # object creation thrashing.
        keys = set(keys)
        result = {}
        if revision.NULL_REVISION in keys:
            keys.remove(revision.NULL_REVISION)
            result[revision.NULL_REVISION] = ()
        self._providers = self._providers[:1] + self.fallback_versionedfiles
        result.update(
            StackedParentsProvider(self._providers).get_parent_map(keys))
        for key, parents in result.iteritems():
            if parents == ():
                result[key] = (revision.NULL_REVISION,)
        return result


class PlanWeaveMerge(TextMerge):
    """Weave merge that takes a plan as its input.

    This exists so that VersionedFile.plan_merge is implementable.
    Most callers will want to use WeaveMerge instead.
    """

    def __init__(self, plan, a_marker=TextMerge.A_MARKER,
                 b_marker=TextMerge.B_MARKER):
        TextMerge.__init__(self, a_marker, b_marker)
        self.plan = plan

    def _merge_struct(self):
        lines_a = []
        lines_b = []
        ch_a = ch_b = False

        def outstanding_struct():
            if not lines_a and not lines_b:
                return
            elif ch_a and not ch_b:
                # one-sided change:
                yield (lines_a,)
            elif ch_b and not ch_a:
                yield (lines_b,)
            elif lines_a == lines_b:
                yield (lines_a,)
            else:
                yield (lines_a, lines_b)

        # We previously considered either 'unchanged' or 'killed-both' lines
        # to be possible places to resynchronize. However, assuming agreement
        # on killed-both lines may be too aggressive. -- mbp 20060324
        for state, line in self.plan:
            if state == 'unchanged':
                # resync and flush queued conflicts changes if any
                for struct in outstanding_struct():
                    yield struct
                lines_a = []
                lines_b = []
                ch_a = ch_b = False

            if state == 'unchanged':
                if line:
                    yield ([line],)
            elif state == 'killed-a':
                ch_a = True
                lines_b.append(line)
            elif state == 'killed-b':
                ch_b = True
                lines_a.append(line)
            elif state == 'new-a':
                ch_a = True
                lines_a.append(line)
            elif state == 'new-b':
                ch_b = True
                lines_b.append(line)
            elif state == 'conflicted-a':
                ch_b = ch_a = True
                lines_a.append(line)
            elif state == 'conflicted-b':
                ch_b = ch_a = True
                lines_b.append(line)
            elif state == 'killed-both':
                # This counts as a change, even though there is no associated
                # line.
                ch_b = ch_a = True
            else:
                if state not in ('irrelevant', 'ghost-a', 'ghost-b',
                        'killed-base'):
                    raise AssertionError(state)
        for struct in outstanding_struct():
            yield struct


class WeaveMerge(PlanWeaveMerge):
    """Weave merge that takes a VersionedFile and two versions as its input."""

    def __init__(self, versionedfile, ver_a, ver_b,
                 a_marker=PlanWeaveMerge.A_MARKER, b_marker=PlanWeaveMerge.B_MARKER):
        plan = versionedfile.plan_merge(ver_a, ver_b)
        PlanWeaveMerge.__init__(self, plan, a_marker, b_marker)


class VirtualVersionedFiles(VersionedFiles):
    """Dummy implementation for VersionedFiles that uses other functions for
    obtaining fulltexts and parent maps.

    This is always on the bottom of the stack and uses string keys
    (rather than tuples) internally.
    """

    def __init__(self, get_parent_map, get_lines):
        """Create a VirtualVersionedFiles.

        :param get_parent_map: Same signature as Repository.get_parent_map.
        :param get_lines: Should return lines for specified key or None if
            not available.
        """
        super(VirtualVersionedFiles, self).__init__()
        self._get_parent_map = get_parent_map
        self._get_lines = get_lines

    def check(self, progressbar=None):
        """See VersionedFiles.check.

        :note: Always returns True for VirtualVersionedFiles.
        """
        return True

    def add_mpdiffs(self, records):
        """See VersionedFiles.mpdiffs.

        :note: Not implemented for VirtualVersionedFiles.
        """
        raise NotImplementedError(self.add_mpdiffs)

    def get_parent_map(self, keys):
        """See VersionedFiles.get_parent_map."""
        return dict([((k,), tuple([(p,) for p in v]))
            for k, v in self._get_parent_map([k for (k,) in keys]).iteritems()])

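    # Sketch (the revision ids are assumed): the string keys returned by the
    # backing callable are wrapped into 1-tuples to fit the VersionedFiles
    # keyspace, so a backing map like {'rev-1': ('rev-0',)} is surfaced as
    # {('rev-1',): (('rev-0',),)}.
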
    def get_sha1s(self, keys):
        """See VersionedFiles.get_sha1s."""
        ret = {}
        for (k,) in keys:
            lines = self._get_lines(k)
            if lines is not None:
                if not isinstance(lines, list):
                    raise AssertionError
                ret[(k,)] = osutils.sha_strings(lines)
        return ret

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream."""
        for (k,) in list(keys):
            lines = self._get_lines(k)
            if lines is not None:
                if not isinstance(lines, list):
                    raise AssertionError
                yield ChunkedContentFactory((k,), None,
                    sha1=osutils.sha_strings(lines),
                    chunks=lines)
            else:
                yield AbsentContentFactory((k,))

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """See VersionedFile.iter_lines_added_or_present_in_versions()."""
        for i, (key,) in enumerate(keys):
            if pb is not None:
                pb.update("Finding changed lines", i, len(keys))
            for l in self._get_lines(key):
                yield (l, key)


def network_bytes_to_kind_and_offset(network_bytes):
    """Strip off a record kind from the front of network_bytes.

    :param network_bytes: The bytes of a record.
    :return: A tuple (storage_kind, offset_of_remaining_bytes)
    """
    line_end = network_bytes.find('\n')
    storage_kind = network_bytes[:line_end]
    return storage_kind, line_end + 1

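# Minimal sketch of the framing assumed here: each serialised record starts
# with its storage kind followed by a newline, so for example
# network_bytes_to_kind_and_offset('fulltext\n...') returns ('fulltext', 9).

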
class NetworkRecordStream(object):
    """A record_stream which reconstitutes a serialised stream."""

    def __init__(self, bytes_iterator):
        """Create a NetworkRecordStream.

        :param bytes_iterator: An iterator of bytes. Each item in this
            iterator should have been obtained from a record_streams'
            record.get_bytes_as(record.storage_kind) call.
        """
        self._bytes_iterator = bytes_iterator
        self._kind_factory = {
            'fulltext': fulltext_network_to_record,
            'groupcompress-block': groupcompress.network_block_to_records,
            'knit-ft-gz': knit.knit_network_to_record,
            'knit-delta-gz': knit.knit_network_to_record,
            'knit-annotated-ft-gz': knit.knit_network_to_record,
            'knit-annotated-delta-gz': knit.knit_network_to_record,
            'knit-delta-closure': knit.knit_delta_closure_to_records,
            }

    def read(self):
        """Read the stream.

        :return: An iterator as per VersionedFiles.get_record_stream().
        """
        for bytes in self._bytes_iterator:
            storage_kind, line_end = network_bytes_to_kind_and_offset(bytes)
            for record in self._kind_factory[storage_kind](
                    storage_kind, bytes, line_end):
                yield record


def fulltext_network_to_record(kind, bytes, line_end):
    """Convert a network fulltext record to record."""
    meta_len, = struct.unpack('!L', bytes[line_end:line_end+4])
    record_meta = bytes[line_end+4:line_end+4+meta_len]
    key, parents = bencode.bdecode_as_tuple(record_meta)
    if parents == 'nil':
        parents = None
    fulltext = bytes[line_end+4+meta_len:]
    return [FulltextContentFactory(key, parents, None, fulltext)]


def _length_prefix(bytes):
    return struct.pack('!L', len(bytes))


def record_to_fulltext_bytes(record):
    if record.parents is None:
        parents = 'nil'
    else:
        parents = record.parents
    record_meta = bencode.bencode((record.key, parents))
    record_content = record.get_bytes_as('fulltext')
    return "fulltext\n%s%s%s" % (
        _length_prefix(record_meta), record_meta, record_content)

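# Hedged round-trip sketch: record_to_fulltext_bytes() frames a record as
# 'fulltext\n' + 4-byte length + bencoded (key, parents) + text, and
# fulltext_network_to_record() reverses it:
#
#   wire = record_to_fulltext_bytes(record)
#   kind, offset = network_bytes_to_kind_and_offset(wire)
#   [roundtripped] = fulltext_network_to_record(kind, wire, offset)

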
def sort_groupcompress(parent_map):
    """Sort and group the keys in parent_map into groupcompress order.

    groupcompress is defined (currently) as reverse-topological order, grouped
    by the key prefix.

    :return: A sorted-list of keys
    """
    # gc-optimal ordering is approximately reverse topological,
    # properly grouped by file-id.
    per_prefix_map = {}
    for item in parent_map.iteritems():
        key = item[0]
        if isinstance(key, str) or len(key) == 1:
            prefix = ''
        else:
            prefix = key[0]
        try:
            per_prefix_map[prefix].append(item)
        except KeyError:
            per_prefix_map[prefix] = [item]

    present_keys = []
    for prefix in sorted(per_prefix_map):
        present_keys.extend(reversed(tsort.topo_sort(per_prefix_map[prefix])))
    return present_keys
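

# Illustrative example (the keys are assumed): keys are grouped by their first
# element and each group is emitted in reverse topological order, so
# sort_groupcompress({('f1', 'b'): (('f1', 'a'),), ('f1', 'a'): ()}) yields the
# 'f1' group with ('f1', 'b') before its parent ('f1', 'a').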