# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
#
# Authors:
#   Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Versioned text file storage api."""

from copy import copy
from cStringIO import StringIO
import os
import struct
from zlib import adler32

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
import urllib

from bzrlib import (
    annotate,
    errors,
    groupcompress,
    index,
    inventory,
    inventory_delta,
    knit,
    osutils,
    multiparent,
    tsort,
    revision,
    ui,
    )
from bzrlib.graph import DictParentsProvider, Graph, StackedParentsProvider
from bzrlib.transport.memory import MemoryTransport
""")
from bzrlib.inter import InterObject
from bzrlib.registry import Registry
from bzrlib.symbol_versioning import *
from bzrlib.textmerge import TextMerge
from bzrlib import bencode


adapter_registry = Registry()
adapter_registry.register_lazy(('knit-delta-gz', 'fulltext'), 'bzrlib.knit',
    'DeltaPlainToFullText')
adapter_registry.register_lazy(('knit-ft-gz', 'fulltext'), 'bzrlib.knit',
    'FTPlainToFullText')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'knit-delta-gz'),
    'bzrlib.knit', 'DeltaAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'fulltext'),
    'bzrlib.knit', 'DeltaAnnotatedToFullText')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'knit-ft-gz'),
    'bzrlib.knit', 'FTAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'fulltext'),
    'bzrlib.knit', 'FTAnnotatedToFullText')
# adapter_registry.register_lazy(('knit-annotated-ft-gz', 'chunked'),
#     'bzrlib.knit', 'FTAnnotatedToChunked')

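# Usage sketch (editorial note, not part of the original module): the registry
# maps (source storage_kind, target storage_kind) pairs to adapter classes
# loaded lazily from bzrlib.knit. A caller that needs a fulltext from a
# 'knit-annotated-ft-gz' record might do roughly:
#
#     adapter_factory = adapter_registry.get(('knit-annotated-ft-gz',
#         'fulltext'))
#     adapter = adapter_factory(None)
#     text = adapter.get_bytes(record)
#
# The exact constructor and get_bytes() arguments vary between adapters and
# bzrlib versions, so treat this as a sketch rather than a guaranteed calling
# convention.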


class ContentFactory(object):
    """Abstract interface for insertion and retrieval from a VersionedFile.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. One of
        'mpdiff', 'knit-annotated-ft', 'knit-annotated-delta', 'knit-ft',
        'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
        'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self):
        """Create a ContentFactory."""
        self.sha1 = None
        self.storage_kind = None
        self.key = None
        self.parents = None


class ChunkedContentFactory(ContentFactory):
    """Static data content factory.

    This takes a 'chunked' list of strings. The only requirement on 'chunked'
    is that ''.join(chunked) becomes a valid fulltext. A tuple of a single
    string satisfies this, as does a list of lines.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. Always
        'chunked'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self, key, parents, sha1, chunks):
        """Create a ContentFactory."""
        self.sha1 = sha1
        self.storage_kind = 'chunked'
        self.key = key
        self.parents = parents
        self._chunks = chunks

    def get_bytes_as(self, storage_kind):
        if storage_kind == 'chunked':
            return self._chunks
        elif storage_kind == 'fulltext':
            return ''.join(self._chunks)
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)


class FulltextContentFactory(ContentFactory):
    """Static data content factory.

    This takes a fulltext when created and just returns that during
    get_bytes_as('fulltext').

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. Always
        'fulltext'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self, key, parents, sha1, text):
        """Create a ContentFactory."""
        self.sha1 = sha1
        self.storage_kind = 'fulltext'
        self.key = key
        self.parents = parents
        self._text = text

    def get_bytes_as(self, storage_kind):
        if storage_kind == self.storage_kind:
            return self._text
        elif storage_kind == 'chunked':
            return [self._text]
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)

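# Illustrative sketch (editorial note, not part of the original module): the
# two static factories expose the same content under different storage kinds.
# The keys and texts below are invented for the example.
#
#     chunked = ChunkedContentFactory(('file-id', 'rev-1'), (), None,
#         ['first line\n', 'second line\n'])
#     full = FulltextContentFactory(('file-id', 'rev-2'),
#         (('file-id', 'rev-1'),), None, 'first line\nsecond line\n')
#     chunked.get_bytes_as('fulltext')   # 'first line\nsecond line\n'
#     full.get_bytes_as('chunked')       # ['first line\nsecond line\n']
#     full.get_bytes_as('mpdiff')        # raises UnavailableRepresentation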


class InventoryDeltaContentFactory(ContentFactory):
    """A ContentFactory whose content is an in-memory inventory delta.

    The delta is held together with the basis inventory id and the serializer
    format flags needed to turn it into 'inventory-delta-bytes'.
    """

    def __init__(self, key, parents, sha1, delta, basis_id, format_flags,
            repo=None):
        self.sha1 = sha1
        self.storage_kind = 'inventory-delta'
        self.key = key
        self.parents = parents
        self._delta = delta
        self._basis_id = basis_id
        self._format_flags = format_flags
        self._repo = repo

    def get_bytes_as(self, storage_kind):
        if storage_kind == self.storage_kind:
            return self._basis_id, self.key, self._delta, self._format_flags
        elif storage_kind == 'inventory-delta-bytes':
            serializer = inventory_delta.InventoryDeltaSerializer()
            serializer.require_flags(*self._format_flags)
            return ''.join(serializer.delta_to_lines(
                self._basis_id, self.key, self._delta))
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)


class AbsentContentFactory(ContentFactory):
    """A placeholder content factory for unavailable texts.

    :ivar sha1: None.
    :ivar storage_kind: 'absent'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: None.
    """

    def __init__(self, key):
        """Create a ContentFactory."""
        self.sha1 = None
        self.storage_kind = 'absent'
        self.key = key
        self.parents = None

    def get_bytes_as(self, storage_kind):
        raise ValueError('A request was made for key: %s, but that'
                         ' content is not available, and the calling'
                         ' code does not handle if it is missing.'
                         % (self.key,))


class AdapterFactory(ContentFactory):
    """A content factory to adapt between key prefixes."""

    def __init__(self, key, parents, adapted):
        """Create an adapter factory instance."""
        self.key = key
        self.parents = parents
        self._adapted = adapted

    def __getattr__(self, attr):
        """Return a member from the adapted object."""
        if attr in ('key', 'parents'):
            return self.__dict__[attr]
        else:
            return getattr(self._adapted, attr)


def filter_absent(record_stream):
    """Adapt a record stream to remove absent records."""
    for record in record_stream:
        if record.storage_kind != 'absent':
            yield record

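# Usage sketch (editorial note, not part of the original module): filter_absent
# is a generator, so it wraps another record stream; 'stream' below stands in
# for whatever get_record_stream() returned.
#
#     for record in filter_absent(stream):
#         # Only records whose storage_kind is not 'absent' reach this point.
#         # Whether 'fulltext' is available still depends on how the stream
#         # was requested (see include_delta_closure).
#         text = record.get_bytes_as('fulltext')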


class VersionedFile(object):
    """Versioned text file storage.

    A versioned file manages versions of line-based text files,
    keeping track of the originating version for each line.

    To clients the "lines" of the file are represented as a list of
    strings. These strings will typically have terminal newline
    characters, but this is not required.  In particular files commonly
    do not have a newline at the end of the file.

    Texts are identified by a version-id string.
    """

    @staticmethod
    def check_not_reserved_id(version_id):
        revision.check_not_reserved_id(version_id)

    def copy_to(self, name, transport):
        """Copy this versioned file to name on transport."""
        raise NotImplementedError(self.copy_to)

    def get_record_stream(self, versions, ordering, include_delta_closure):
        """Get a stream of records for versions.

        :param versions: The versions to include. Each version is a tuple
            (version,).
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the data content of the
            stream, not in the emitted records). This guarantees that
            'fulltext' can be used successfully on every record.
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        raise NotImplementedError(self.get_record_stream)

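    # Consumption sketch (editorial note, not part of the original module):
    # 'vf' stands in for any concrete implementation and the version ids are
    # invented for the example.
    #
    #     stream = vf.get_record_stream([('rev-1',), ('rev-2',)],
    #         'topological', include_delta_closure=True)
    #     for record in stream:
    #         if record.storage_kind == 'absent':
    #             raise errors.RevisionNotPresent(record.key, vf)
    #         # include_delta_closure=True guarantees that 'fulltext' can be
    #         # extracted from every non-absent record.
    #         text = record.get_bytes_as('fulltext')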

    def has_version(self, version_id):
        """Returns whether version is present."""
        raise NotImplementedError(self.has_version)

    def insert_record_stream(self, stream):
        """Insert a record stream into this versioned file.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        raise NotImplementedError

    def add_lines(self, version_id, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a single text on top of the versioned file.

        Must raise RevisionAlreadyPresent if the new version is
        already present in file history.

        Must raise RevisionNotPresent if any of the given parents are
        not present in file history.

        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminating \n. If the lines list does not meet this constraint the
            add routine may error or may succeed - but you will be unable to
            read the data back accurately. (Checking the lines have been split
            correctly is expensive and extremely unlikely to catch bugs so it
            is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations.  VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent.  The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
                 representation of the inserted version which can be provided
                 back to future add_lines calls in the parent_texts dictionary.
        """
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

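    # Illustrative sketch (editorial note, not part of the original module) of
    # the add_lines return value and the parent_texts optimisation described
    # above; 'vf' and the version ids are invented for the example.
    #
    #     parent_texts = {}
    #     sha1, num_bytes, parent_text = vf.add_lines('rev-1', [],
    #         ['hello\n', 'world\n'])
    #     parent_texts['rev-1'] = parent_text
    #     # Reusing the opaque representation lets the backend delta against
    #     # 'rev-1' without re-reading it.
    #     vf.add_lines('rev-2', ['rev-1'], ['hello\n', 'world there\n'],
    #         parent_texts=parent_texts)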

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Helper to do the class specific add_lines."""
        raise NotImplementedError(self.add_lines)

    def add_lines_with_ghosts(self, version_id, parents, lines,
        parent_texts=None, nostore_sha=None, random_id=False,
        check_content=True, left_matching_blocks=None):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines and returns the same.
        """
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
            parent_texts, nostore_sha, random_id, check_content, left_matching_blocks)

    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts,
        nostore_sha, random_id, check_content, left_matching_blocks):
        """Helper to do class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)

    def check(self, progress_bar=None):
        """Check the versioned file for integrity."""
        raise NotImplementedError(self.check)

    def _check_lines_not_unicode(self, lines):
        """Check that lines being added to a versioned file are not unicode."""
        for line in lines:
            if line.__class__ is not str:
                raise errors.BzrBadParameterUnicode("lines")

    def _check_lines_are_lines(self, lines):
        """Check that the lines really are full lines without inline EOL."""
        for line in lines:
            if '\n' in line[:-1]:
                raise errors.BzrBadParameterContainsNewline("lines")

    def get_format_signature(self):
        """Get a text description of the data encoding in this file.

        :since: 0.90
        """
        raise NotImplementedError(self.get_format_signature)

    def make_mpdiffs(self, version_ids):
        """Create multiparent diffs for specified versions."""
        knit_versions = set()
        knit_versions.update(version_ids)
        parent_map = self.get_parent_map(version_ids)
        for version_id in version_ids:
            try:
                knit_versions.update(parent_map[version_id])
            except KeyError:
                raise errors.RevisionNotPresent(version_id, self)
        # We need to filter out ghosts, because we can't diff against them.
        knit_versions = set(self.get_parent_map(knit_versions).keys())
        lines = dict(zip(knit_versions,
            self._get_lf_split_line_list(knit_versions)))
        diffs = []
        for version_id in version_ids:
            target = lines[version_id]
            try:
                parents = [lines[p] for p in parent_map[version_id] if p in
                    knit_versions]
            except KeyError:
                # I don't know how this could ever trigger.
                # parent_map[version_id] was already triggered in the previous
                # for loop, and lines[p] has the 'if p in knit_versions' check,
                # so we again won't have a KeyError.
                raise errors.RevisionNotPresent(version_id, self)
            if len(parents) > 0:
                left_parent_blocks = self._extract_blocks(version_id,
                                                          parents[0], target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target, parents,
                         left_parent_blocks))
        return diffs

    def _extract_blocks(self, version_id, source, target):
        return None

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        # Does this need to call self._check_write_ok()? (IanC 20070919)
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        present_parents = set(self.get_parent_map(needed_parents).keys())
        for parent_id, lines in zip(present_parents,
                                 self._get_lf_split_line_list(present_parents)):
            mpvf.add_version(lines, parent_id, [])
        for (version, parent_ids, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_ids) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_ids[0]).num_lines()))
            else:
                left_matching_blocks = None
            try:
                _, _, version_text = self.add_lines_with_ghosts(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            except NotImplementedError:
                # The vf can't handle ghosts, so add lines normally, which will
                # (reasonably) fail if there are ghosts in the data.
                _, _, version_text = self.add_lines(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            vf_parents[version] = version_text
        sha1s = self.get_sha1s(versions)
        for version, parent_ids, expected_sha1, mpdiff in records:
            if expected_sha1 != sha1s[version]:
                raise errors.VersionedFileInvalidChecksum(version)

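    # Roundtrip sketch (editorial note, not part of the original module):
    # 'source_vf' and 'target_vf' stand in for two VersionedFile instances
    # sharing version ids.
    #
    #     version_ids = ['rev-1', 'rev-2']
    #     mpdiffs = source_vf.make_mpdiffs(version_ids)
    #     parent_map = source_vf.get_parent_map(version_ids)
    #     sha1s = source_vf.get_sha1s(version_ids)
    #     records = [(v, parent_map[v], sha1s[v], diff)
    #                for v, diff in zip(version_ids, mpdiffs)]
    #     target_vf.add_mpdiffs(records)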

    def get_text(self, version_id):
        """Return version contents as a text string.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return ''.join(self.get_lines(version_id))
    get_string = get_text

    def get_texts(self, version_ids):
        """Return the texts of listed versions as a list of strings.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return [''.join(self.get_lines(v)) for v in version_ids]

    def get_lines(self, version_id):
        """Return version contents as a sequence of lines.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        raise NotImplementedError(self.get_lines)

    def _get_lf_split_line_list(self, version_ids):
        return [StringIO(t).readlines() for t in self.get_texts(version_ids)]

    def get_ancestry(self, version_ids, topo_sorted=True):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        This list will not be topologically sorted if topo_sorted=False is
        passed.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history."""
        if isinstance(version_ids, basestring):
            version_ids = [version_ids]
        raise NotImplementedError(self.get_ancestry)

    def get_ancestry_with_ghosts(self, version_ids):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history.

        Ghosts that are known about will be included in ancestry list,
        but are not explicitly marked.
        """
        raise NotImplementedError(self.get_ancestry_with_ghosts)

    def get_parent_map(self, version_ids):
        """Get a map of the parents of version_ids.

        :param version_ids: The version ids to look up parents for.
        :return: A mapping from version id to parents.
        """
        raise NotImplementedError(self.get_parent_map)

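    # Illustrative sketch (editorial note, not part of the original module);
    # 'vf' and the ids are invented. Versions that are not present (ghosts)
    # simply do not appear as keys in the returned map.
    #
    #     parent_map = vf.get_parent_map(['rev-2', 'ghost-rev'])
    #     # e.g. {'rev-2': ('rev-1',)} -- 'ghost-rev' is absent from the result
    #     parents = parent_map.get('rev-2', ())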

    def get_parents_with_ghosts(self, version_id):
        """Return version names for parents of version_id.

        Will raise RevisionNotPresent if version_id is not present
        in the history.

        Ghosts that are known about will be included in the parent list,
        but are not explicitly marked.
        """
        try:
            return list(self.get_parent_map([version_id])[version_id])
        except KeyError:
            raise errors.RevisionNotPresent(version_id, self)

    def annotate(self, version_id):
        """Return a list of (version-id, line) tuples for version_id.

        :raise RevisionNotPresent: If the given version is
        not present in file history.
        """
        raise NotImplementedError(self.annotate)

    def iter_lines_added_or_present_in_versions(self, version_ids=None,
                                                pb=None):
        """Iterate over the lines in the versioned file from version_ids.

        This may return lines from other versions. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES: Lines are normalised: they will all have \n terminators.
               Lines are returned in arbitrary order.

        :return: An iterator over (line, version_id).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_versions)

    def plan_merge(self, ver_a, ver_b):
        """Return pseudo-annotation indicating how the two versions merge.

        This is computed between versions a and b and their common
        base.

        Weave lines present in none of them are skipped entirely.

        Legend:
        killed-base Dead in base revision
        killed-both Killed in each revision
        killed-a    Killed in a
        killed-b    Killed in b
        unchanged   Alive in both a and b (possibly created in both)
        new-a       Created in a
        new-b       Created in b
        ghost-a     Killed in a, unborn in b
        ghost-b     Killed in b, unborn in a
        irrelevant  Not in either revision
        """
        raise NotImplementedError(VersionedFile.plan_merge)

    def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
                    b_marker=TextMerge.B_MARKER):
        return PlanWeaveMerge(plan, a_marker, b_marker).merge_lines()[0]

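    # Merge sketch (editorial note, not part of the original module); 'vf'
    # stands in for a VersionedFile implementation that provides plan_merge
    # (for example a Weave), and the version ids are invented.
    #
    #     plan = vf.plan_merge('rev-a', 'rev-b')
    #     # plan is an iterable of (state, line) pairs using the legend
    #     # documented in plan_merge(); weave_merge turns it into merged lines
    #     # with conflict markers taken from TextMerge.
    #     merged_lines = vf.weave_merge(plan)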


class RecordingVersionedFilesDecorator(object):
    """A minimal versioned files implementation that records calls made on it.

    Only enough methods have been added to support tests using it to date.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf):
        """Create a RecordingVersionedFilesDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        """
        self._backing_vf = backing_vf
        self.calls = []

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        self.calls.append(("add_lines", key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content))
        return self._backing_vf.add_lines(key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

    def check(self):
        self._backing_vf.check()

    def get_parent_map(self, keys):
        self.calls.append(("get_parent_map", copy(keys)))
        return self._backing_vf.get_parent_map(keys)

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        self.calls.append(("get_record_stream", list(keys), sort_order,
            include_delta_closure))
        return self._backing_vf.get_record_stream(keys, sort_order,
            include_delta_closure)

    def get_sha1s(self, keys):
        self.calls.append(("get_sha1s", copy(keys)))
        return self._backing_vf.get_sha1s(keys)

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        self.calls.append(("iter_lines_added_or_present_in_keys", copy(keys)))
        return self._backing_vf.iter_lines_added_or_present_in_keys(keys, pb=pb)

    def keys(self):
        self.calls.append(("keys",))
        return self._backing_vf.keys()


class OrderingVersionedFilesDecorator(RecordingVersionedFilesDecorator):
    """A VF that records calls, and returns keys in specific order.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf, key_priority):
        """Create an OrderingVersionedFilesDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        :param key_priority: A dictionary defining what order keys should be
            returned from an 'unordered' get_record_stream request.
            Keys with lower priority are returned first, keys not present in
            the map get an implicit priority of 0, and are returned in
            lexicographical order.
        """
        RecordingVersionedFilesDecorator.__init__(self, backing_vf)
        self._key_priority = key_priority

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        self.calls.append(("get_record_stream", list(keys), sort_order,
            include_delta_closure))
        if sort_order == 'unordered':
            def sort_key(key):
                return (self._key_priority.get(key, 0), key)
            # Use a defined order by asking for the keys one-by-one from the
            # backing_vf
            for key in sorted(keys, key=sort_key):
                for record in self._backing_vf.get_record_stream([key],
                                'unordered', include_delta_closure):
                    yield record
        else:
            for record in self._backing_vf.get_record_stream(keys, sort_order,
                            include_delta_closure):
                yield record

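# Illustrative sketch (editorial note, not part of the original module);
# 'backing_vf' and the keys are invented. Keys with a lower priority value come
# back first for 'unordered' requests, and unknown keys default to priority 0.
#
#     vf = OrderingVersionedFilesDecorator(backing_vf,
#         {('rev-b',): 1, ('rev-a',): 2})
#     stream = vf.get_record_stream([('rev-a',), ('rev-b',), ('rev-c',)],
#         'unordered', False)
#     # Records arrive for ('rev-c',) first (implicit priority 0), then
#     # ('rev-b',), then ('rev-a',); vf.calls records the request itself.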


class KeyMapper(object):
    """KeyMappers map between keys and underlying partitioned storage."""

    def map(self, key):
        """Map key to an underlying storage identifier.

        :param key: A key tuple e.g. ('file-id', 'revision-id').
        :return: An underlying storage identifier, specific to the partitioning
            mechanism.
        """
        raise NotImplementedError(self.map)

    def unmap(self, partition_id):
        """Map a partitioned storage id back to a key prefix.

        :param partition_id: The underlying partition id.
        :return: As much of a key (or prefix) as is derivable from the partition
            id.
        """
        raise NotImplementedError(self.unmap)


class ConstantMapper(KeyMapper):
    """A key mapper that maps to a constant result."""

    def __init__(self, result):
        """Create a ConstantMapper which will return result for all maps."""
        self._result = result

    def map(self, key):
        """See KeyMapper.map()."""
        return self._result


class URLEscapeMapper(KeyMapper):
    """Base class for use with transport backed storage.

    This provides a map and unmap wrapper that respectively url escape and
    unescape their outputs and inputs.
    """

    def map(self, key):
        """See KeyMapper.map()."""
        return urllib.quote(self._map(key))

    def unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return self._unmap(urllib.unquote(partition_id))


class PrefixMapper(URLEscapeMapper):
    """A key mapper that extracts the first component of a key.

    This mapper is for use with a transport based backend.
    """

    def _map(self, key):
        """See KeyMapper.map()."""
        return key[0]

    def _unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return (partition_id,)


class HashPrefixMapper(URLEscapeMapper):
    """A key mapper that combines the first component of a key with a hash.

    This mapper is for use with a transport based backend.
    """

    def _map(self, key):
        """See KeyMapper.map()."""
        prefix = self._escape(key[0])
        return "%02x/%s" % (adler32(prefix) & 0xff, prefix)

    def _escape(self, prefix):
        """No escaping needed here."""
        return prefix

    def _unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return (self._unescape(osutils.basename(partition_id)),)

    def _unescape(self, basename):
        """No unescaping needed for HashPrefixMapper."""
        return basename


class HashEscapedPrefixMapper(HashPrefixMapper):
    """Combines the escaped first component of a key with a hash.

    This mapper is for use with a transport based backend.
    """

    _safe = "abcdefghijklmnopqrstuvwxyz0123456789-_@,."

    def _escape(self, prefix):
        """Turn a key element into a filesystem safe string.

        This is similar to a plain urllib.quote, except
        it uses specific safe characters, so that it doesn't
        have to translate a lot of valid file ids.
        """
        # @ does not get escaped. This is because it is a valid
        # filesystem character we use all the time, and it looks
        # a lot better than seeing %40 all the time.
        r = [((c in self._safe) and c or ('%%%02x' % ord(c)))
             for c in prefix]
        return ''.join(r)

    def _unescape(self, basename):
        """Escaped names are easily unescaped by urlutils."""
        return urllib.unquote(basename)
795
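

# A minimal sketch (not part of the API) of the paths the mappers above
# produce for one two-element key; the key contents here are illustrative
# only, and the hash component is computed rather than hard-coded.
def _example_mapper_paths():
    key = ('file-id', 'rev-1')
    return {
        'constant': ConstantMapper('inventory').map(key),
        'prefix': PrefixMapper().map(key),
        'hash-prefix': HashPrefixMapper().map(key),
        'hash-escaped': HashEscapedPrefixMapper().map(key),
    }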


def make_versioned_files_factory(versioned_file_factory, mapper):
    """Create a ThunkedVersionedFiles factory.

    This will create a callable which when called creates a
    ThunkedVersionedFiles on a transport, using mapper to access individual
    versioned files, and versioned_file_factory to create each individual file.
    """
    def factory(transport):
        return ThunkedVersionedFiles(transport, versioned_file_factory, mapper,
            lambda:True)
    return factory
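

# A minimal sketch (not part of the API) of wiring the factory above to a
# transport. ``versioned_file_factory`` is assumed to be a per-file
# VersionedFile class accepting the constructor arguments used by
# ThunkedVersionedFiles._get_vf() (bzrlib's weave file class is one such);
# the memory transport is just a convenient stand-in.
def _example_build_thunked_store(versioned_file_factory):
    from bzrlib.transport import get_transport
    factory = make_versioned_files_factory(versioned_file_factory,
        PrefixMapper())
    return factory(get_transport('memory:///'))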


class VersionedFiles(object):
    """Storage for many versioned files.

    This object allows a single keyspace for accessing the history graph and
    contents of named bytestrings.

    Currently no implementation allows the graph of different key prefixes to
    intersect, but the API does allow such implementations in the future.

    The keyspace is expressed via simple tuples. Any instance of VersionedFiles
    may have a different length key-size, but that size will be constant for
    all texts added to or retrieved from it. For instance, bzrlib uses
    instances with a key-size of 2 for storing user files in a repository, with
    the first element the fileid, and the second the version of that file.

    The use of tuples allows a single code base to support several different
    uses with only the mapping logic changing from instance to instance.
    """

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a text to the store.

        :param key: The key tuple of the text to add. If the last element is
            None, a CHK string will be generated during the addition.
        :param parents: The parents key tuples of the text to add.
        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminating \n. If the lines list does not meet this constraint the
            add routine may error or may succeed - but you will be unable to
            read the data back accurately. (Checking the lines have been split
            correctly is expensive and extremely unlikely to catch bugs so it
            is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations.  VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent.  The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
                 representation of the inserted version which can be provided
                 back to future add_lines calls in the parent_texts dictionary.
        """
        raise NotImplementedError(self.add_lines)

    def _add_text(self, key, parents, text, nostore_sha=None, random_id=False):
        """Add a text to the store.

        This is a private function for use by CommitBuilder.

        :param key: The key tuple of the text to add. If the last element is
            None, a CHK string will be generated during the addition.
        :param parents: The parents key tuples of the text to add.
        :param text: A string containing the text to be committed.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :return: The text sha1, the number of bytes in the text, and an opaque
                 representation of the inserted version which can be provided
                 back to future _add_text calls in the parent_texts dictionary.
        """
        # The default implementation just thunks over to .add_lines(),
        # inefficient, but it works.
        return self.add_lines(key, parents, osutils.split_lines(text),
                              nostore_sha=nostore_sha,
                              random_id=random_id,
                              check_content=True)

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        # It seems likely that adding all the present parents as fulltexts can
        # easily exhaust memory.
        chunks_to_lines = osutils.chunks_to_lines
        for record in self.get_record_stream(needed_parents, 'unordered',
            True):
            if record.storage_kind == 'absent':
                continue
            mpvf.add_version(chunks_to_lines(record.get_bytes_as('chunked')),
                record.key, [])
        for (key, parent_keys, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_keys) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_keys[0]).num_lines()))
            else:
                left_matching_blocks = None
            version_sha1, _, version_text = self.add_lines(key,
                parent_keys, lines, vf_parents,
                left_matching_blocks=left_matching_blocks)
            if version_sha1 != expected_sha1:
                raise errors.VersionedFileInvalidChecksum(version)
            vf_parents[key] = version_text

    def annotate(self, key):
        """Return a list of (version-key, line) tuples for the text of key.

        :raise RevisionNotPresent: If the key is not present.
        """
        raise NotImplementedError(self.annotate)

    def check(self, progress_bar=None):
        """Check this object for integrity."""
        raise NotImplementedError(self.check)

    @staticmethod
    def check_not_reserved_id(version_id):
        revision.check_not_reserved_id(version_id)

    def _check_lines_not_unicode(self, lines):
        """Check that lines being added to a versioned file are not unicode."""
        for line in lines:
            if line.__class__ is not str:
                raise errors.BzrBadParameterUnicode("lines")

    def _check_lines_are_lines(self, lines):
        """Check that the lines really are full lines without inline EOL."""
        for line in lines:
            if '\n' in line[:-1]:
                raise errors.BzrBadParameterContainsNewline("lines")

    def get_parent_map(self, keys):
        """Get a map of the parents of keys.

        :param keys: The keys to look up parents for.
        :return: A mapping from keys to parents. Absent keys are absent from
            the mapping.
        """
        raise NotImplementedError(self.get_parent_map)

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """Get a stream of records for keys.

        :param keys: The keys to include.
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the opaque data).
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        raise NotImplementedError(self.get_record_stream)

    def get_sha1s(self, keys):
        """Get the sha1's of the texts for the given keys.

        :param keys: The names of the keys to lookup
        :return: a dict from key to sha1 digest. Keys of texts which are not
            present in the store are not present in the returned
            dictionary.
        """
        raise NotImplementedError(self.get_sha1s)

    has_key = index._has_key_from_parent_map

    def get_missing_compression_parent_keys(self):
        """Return an iterable of keys of missing compression parents.

        Check this after calling insert_record_stream to find out if there are
        any missing compression parents.  If there are, the records that
        depend on them are not able to be inserted safely. The precise
        behaviour depends on the concrete VersionedFiles class in use.

        Classes that do not support this will raise NotImplementedError.
        """
        raise NotImplementedError(self.get_missing_compression_parent_keys)

    def insert_record_stream(self, stream):
        """Insert a record stream into this container.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        raise NotImplementedError

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """Iterate over the lines in the versioned files from keys.

        This may return lines from other keys. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES:
         * Lines are normalised by the underlying store: they will all have \n
           terminators.
         * Lines are returned in arbitrary order.

        :return: An iterator over (line, key).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_keys)

    def keys(self):
        """Return an iterable of the keys for all the contained texts."""
        raise NotImplementedError(self.keys)

    def make_mpdiffs(self, keys):
        """Create multiparent diffs for specified keys."""
        keys_order = tuple(keys)
        keys = frozenset(keys)
        knit_keys = set(keys)
        parent_map = self.get_parent_map(keys)
        for parent_keys in parent_map.itervalues():
            if parent_keys:
                knit_keys.update(parent_keys)
        missing_keys = keys - set(parent_map)
        if missing_keys:
            raise errors.RevisionNotPresent(list(missing_keys)[0], self)
        # We need to filter out ghosts, because we can't diff against them.
        maybe_ghosts = knit_keys - keys
        ghosts = maybe_ghosts - set(self.get_parent_map(maybe_ghosts))
        knit_keys.difference_update(ghosts)
        lines = {}
        chunks_to_lines = osutils.chunks_to_lines
        for record in self.get_record_stream(knit_keys, 'topological', True):
            lines[record.key] = chunks_to_lines(record.get_bytes_as('chunked'))
            # line_block_dict = {}
            # for parent, blocks in record.extract_line_blocks():
            #   line_blocks[parent] = blocks
            # line_blocks[record.key] = line_block_dict
        diffs = []
        for key in keys_order:
            target = lines[key]
            parents = parent_map[key] or []
            # Note that filtering knit_keys can lead to a parent difference
            # between the creation and the application of the mpdiff.
            parent_lines = [lines[p] for p in parents if p in knit_keys]
            if len(parent_lines) > 0:
                left_parent_blocks = self._extract_blocks(key, parent_lines[0],
                    target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target,
                parent_lines, left_parent_blocks))
        return diffs

    missing_keys = index._missing_keys_from_parent_map

    def _extract_blocks(self, version_id, source, target):
        return None
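

# The record-stream contract above is easiest to see in a tiny consumer.
# This is a minimal sketch, not part of the API: ``vf`` is assumed to be any
# concrete VersionedFiles (for example a repository's ``texts`` attribute)
# and ``key`` a key tuple of the right length for that store.
def _example_get_fulltext(vf, key):
    for record in vf.get_record_stream([key], 'unordered', True):
        if record.storage_kind == 'absent':
            raise errors.RevisionNotPresent(key, vf)
        # With include_delta_closure=True any present record can hand back
        # a fulltext, whatever its native storage kind.
        return record.get_bytes_as('fulltext')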


class ThunkedVersionedFiles(VersionedFiles):
    """Storage for many versioned files thunked onto a 'VersionedFile' class.

    This object allows a single keyspace for accessing the history graph and
    contents of named bytestrings.

    Currently no implementation allows the graph of different key prefixes to
    intersect, but the API does allow such implementations in the future.
    """

    def __init__(self, transport, file_factory, mapper, is_locked):
        """Create a ThunkedVersionedFiles."""
        self._transport = transport
        self._file_factory = file_factory
        self._mapper = mapper
        self._is_locked = is_locked

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """See VersionedFiles.add_lines()."""
        path = self._mapper.map(key)
        version_id = key[-1]
        parents = [parent[-1] for parent in parents]
        vf = self._get_vf(path)
        try:
            try:
                return vf.add_lines_with_ghosts(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)
            except NotImplementedError:
                return vf.add_lines(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)
        except errors.NoSuchFile:
            # parent directory may be missing, try again.
            self._transport.mkdir(osutils.dirname(path))
            try:
                return vf.add_lines_with_ghosts(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)
            except NotImplementedError:
                return vf.add_lines(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)

    def annotate(self, key):
        """Return a list of (version-key, line) tuples for the text of key.

        :raise RevisionNotPresent: If the key is not present.
        """
        prefix = key[:-1]
        path = self._mapper.map(prefix)
        vf = self._get_vf(path)
        origins = vf.annotate(key[-1])
        result = []
        for origin, line in origins:
            result.append((prefix + (origin,), line))
        return result

    def get_annotator(self):
        return annotate.Annotator(self)

    def check(self, progress_bar=None):
        """See VersionedFiles.check()."""
        for prefix, vf in self._iter_all_components():
            vf.check()

    def get_parent_map(self, keys):
        """Get a map of the parents of keys.

        :param keys: The keys to look up parents for.
        :return: A mapping from keys to parents. Absent keys are absent from
            the mapping.
        """
        prefixes = self._partition_keys(keys)
        result = {}
        for prefix, suffixes in prefixes.items():
            path = self._mapper.map(prefix)
            vf = self._get_vf(path)
            parent_map = vf.get_parent_map(suffixes)
            for key, parents in parent_map.items():
                result[prefix + (key,)] = tuple(
                    prefix + (parent,) for parent in parents)
        return result

    def _get_vf(self, path):
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        return self._file_factory(path, self._transport, create=True,
            get_scope=lambda:None)

    def _partition_keys(self, keys):
        """Turn keys into a dict of prefix:suffix_list."""
        result = {}
        for key in keys:
            prefix_keys = result.setdefault(key[:-1], [])
            prefix_keys.append(key[-1])
        return result

    def _get_all_prefixes(self):
        # Identify all key prefixes.
        # XXX: A bit hacky, needs polish.
        if type(self._mapper) == ConstantMapper:
            paths = [self._mapper.map(())]
            prefixes = [()]
        else:
            relpaths = set()
            for quoted_relpath in self._transport.iter_files_recursive():
                path, ext = os.path.splitext(quoted_relpath)
                relpaths.add(path)
            paths = list(relpaths)
            prefixes = [self._mapper.unmap(path) for path in paths]
        return zip(paths, prefixes)

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream()."""
        # Ordering will be taken care of by each partitioned store; group keys
        # by partition.
        keys = sorted(keys)
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            suffixes = [(suffix,) for suffix in suffixes]
            for record in vf.get_record_stream(suffixes, ordering,
                include_delta_closure):
                if record.parents is not None:
                    record.parents = tuple(
                        prefix + parent for parent in record.parents)
                record.key = prefix + record.key
                yield record

    def _iter_keys_vf(self, keys):
        prefixes = self._partition_keys(keys)
        sha1s = {}
        for prefix, suffixes in prefixes.items():
            path = self._mapper.map(prefix)
            vf = self._get_vf(path)
            yield prefix, suffixes, vf

    def get_sha1s(self, keys):
        """See VersionedFiles.get_sha1s()."""
        sha1s = {}
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            vf_sha1s = vf.get_sha1s(suffixes)
            for suffix, sha1 in vf_sha1s.iteritems():
                sha1s[prefix + (suffix,)] = sha1
        return sha1s

    def insert_record_stream(self, stream):
        """Insert a record stream into this container.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        for record in stream:
            prefix = record.key[:-1]
            key = record.key[-1:]
            if record.parents is not None:
                parents = [parent[-1:] for parent in record.parents]
            else:
                parents = None
            thunk_record = AdapterFactory(key, parents, record)
            path = self._mapper.map(prefix)
            # Note that this parses the file many times; we can do better but
            # as this only impacts weaves in terms of performance, it is
            # tolerable.
            vf = self._get_vf(path)
            vf.insert_record_stream([thunk_record])

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """Iterate over the lines in the versioned files from keys.

        This may return lines from other keys. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES:
         * Lines are normalised by the underlying store: they will all have \n
           terminators.
         * Lines are returned in arbitrary order.

        :return: An iterator over (line, key).
        """
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            for line, version in vf.iter_lines_added_or_present_in_versions(suffixes):
                yield line, prefix + (version,)

    def _iter_all_components(self):
        for path, prefix in self._get_all_prefixes():
            yield prefix, self._get_vf(path)

    def keys(self):
        """See VersionedFiles.keys()."""
        result = set()
        for prefix, vf in self._iter_all_components():
            for suffix in vf.versions():
                result.add(prefix + (suffix,))
        return result
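

# A quick sketch (not part of the API) of the partitioning performed by
# _partition_keys() above: tuple keys are split into a prefix naming the
# per-file store and a suffix naming the version inside it. The keys used
# here are purely illustrative.
def _example_prefix_partitioning():
    keys = [('file-1', 'rev-1'), ('file-1', 'rev-2'), ('file-2', 'rev-1')]
    partitions = {}
    for key in keys:
        partitions.setdefault(key[:-1], []).append(key[-1])
    # partitions == {('file-1',): ['rev-1', 'rev-2'], ('file-2',): ['rev-1']}
    return partitions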


class _PlanMergeVersionedFile(VersionedFiles):
    """A VersionedFile for uncommitted and committed texts.

    It is intended to allow merges to be planned with working tree texts.
    It implements only the small part of the VersionedFiles interface used by
    PlanMerge.  It falls back to multiple versionedfiles for data not stored in
    _PlanMergeVersionedFile itself.

    :ivar fallback_versionedfiles: a list of VersionedFiles objects that can be
        queried for missing texts.
    """

    def __init__(self, file_id):
        """Create a _PlanMergeVersionedFile.

        :param file_id: Used with _PlanMerge code which is not yet fully
            tuple-keyspace aware.
        """
        self._file_id = file_id
        # fallback locations
        self.fallback_versionedfiles = []
        # Parents for locally held keys.
        self._parents = {}
        # line data for locally held keys.
        self._lines = {}
        # key lookup providers
        self._providers = [DictParentsProvider(self._parents)]

    def plan_merge(self, ver_a, ver_b, base=None):
        """See VersionedFile.plan_merge"""
        from bzrlib.merge import _PlanMerge
        if base is None:
            return _PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge()
        old_plan = list(_PlanMerge(ver_a, base, self, (self._file_id,)).plan_merge())
        new_plan = list(_PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge())
        return _PlanMerge._subtract_plans(old_plan, new_plan)

    def plan_lca_merge(self, ver_a, ver_b, base=None):
        from bzrlib.merge import _PlanLCAMerge
        graph = Graph(self)
        new_plan = _PlanLCAMerge(ver_a, ver_b, self, (self._file_id,), graph).plan_merge()
        if base is None:
            return new_plan
        old_plan = _PlanLCAMerge(ver_a, base, self, (self._file_id,), graph).plan_merge()
        return _PlanLCAMerge._subtract_plans(list(old_plan), list(new_plan))

    def add_lines(self, key, parents, lines):
        """See VersionedFiles.add_lines

        Lines are added locally, not to fallback versionedfiles.  Also, ghosts
        are permitted.  Only reserved ids are permitted.
        """
        if type(key) is not tuple:
            raise TypeError(key)
        if not revision.is_reserved_id(key[-1]):
            raise ValueError('Only reserved ids may be used')
        if parents is None:
            raise ValueError('Parents may not be None')
        if lines is None:
            raise ValueError('Lines may not be None')
        self._parents[key] = tuple(parents)
        self._lines[key] = lines

    def get_record_stream(self, keys, ordering, include_delta_closure):
        pending = set(keys)
        for key in keys:
            if key in self._lines:
                lines = self._lines[key]
                parents = self._parents[key]
                pending.remove(key)
                yield ChunkedContentFactory(key, parents, None, lines)
        for versionedfile in self.fallback_versionedfiles:
            for record in versionedfile.get_record_stream(
                pending, 'unordered', True):
                if record.storage_kind == 'absent':
                    continue
                else:
                    pending.remove(record.key)
                    yield record
            if not pending:
                return
        # report absent entries
        for key in pending:
            yield AbsentContentFactory(key)

    def get_parent_map(self, keys):
        """See VersionedFiles.get_parent_map"""
        # We create a new provider because a fallback may have been added.
        # If we make fallbacks private we can update a stack list and avoid
        # object creation thrashing.
        keys = set(keys)
        result = {}
        if revision.NULL_REVISION in keys:
            keys.remove(revision.NULL_REVISION)
            result[revision.NULL_REVISION] = ()
        self._providers = self._providers[:1] + self.fallback_versionedfiles
        result.update(
            StackedParentsProvider(self._providers).get_parent_map(keys))
        for key, parents in result.iteritems():
            if parents == ():
                result[key] = (revision.NULL_REVISION,)
        return result


class PlanWeaveMerge(TextMerge):
    """Weave merge that takes a plan as its input.

    This exists so that VersionedFile.plan_merge is implementable.
    Most callers will want to use WeaveMerge instead.
    """

    def __init__(self, plan, a_marker=TextMerge.A_MARKER,
                 b_marker=TextMerge.B_MARKER):
        TextMerge.__init__(self, a_marker, b_marker)
        self.plan = plan

    def _merge_struct(self):
        lines_a = []
        lines_b = []
        ch_a = ch_b = False

        def outstanding_struct():
            if not lines_a and not lines_b:
                return
            elif ch_a and not ch_b:
                # one-sided change:
                yield (lines_a,)
            elif ch_b and not ch_a:
                yield (lines_b,)
            elif lines_a == lines_b:
                yield (lines_a,)
            else:
                yield (lines_a, lines_b)

        # We previously considered either 'unchanged' or 'killed-both' lines
        # to be possible places to resynchronize.  However, assuming agreement
        # on killed-both lines may be too aggressive. -- mbp 20060324
        for state, line in self.plan:
            if state == 'unchanged':
                # resync and flush queued conflicts changes if any
                for struct in outstanding_struct():
                    yield struct
                lines_a = []
                lines_b = []
                ch_a = ch_b = False

            if state == 'unchanged':
                if line:
                    yield ([line],)
            elif state == 'killed-a':
                ch_a = True
                lines_b.append(line)
            elif state == 'killed-b':
                ch_b = True
                lines_a.append(line)
            elif state == 'new-a':
                ch_a = True
                lines_a.append(line)
            elif state == 'new-b':
                ch_b = True
                lines_b.append(line)
            elif state == 'conflicted-a':
                ch_b = ch_a = True
                lines_a.append(line)
            elif state == 'conflicted-b':
                ch_b = ch_a = True
                lines_b.append(line)
            elif state == 'killed-both':
                # This counts as a change, even though there is no associated
                # line
                ch_b = ch_a = True
            else:
                if state not in ('irrelevant', 'ghost-a', 'ghost-b',
                        'killed-base'):
                    raise AssertionError(state)
        for struct in outstanding_struct():
            yield struct
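

# A minimal sketch (not part of the API) of feeding a hand-built plan to
# PlanWeaveMerge. The plan states used here ('unchanged', 'new-a', 'new-b')
# are among those handled by _merge_struct() above; the lines themselves are
# made up for illustration.
def _example_plan_weave_merge():
    plan = [
        ('unchanged', 'common\n'),
        ('new-a', 'only in a\n'),
        ('new-b', 'only in b\n'),
    ]
    # Each yielded struct is either a one-tuple of agreed lines or a
    # two-tuple (lines_a, lines_b) describing a conflicted region, so this
    # returns [(['common\n'],), (['only in a\n'], ['only in b\n'])].
    return list(PlanWeaveMerge(plan)._merge_struct())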


class WeaveMerge(PlanWeaveMerge):
    """Weave merge that takes a VersionedFile and two versions as its input."""

    def __init__(self, versionedfile, ver_a, ver_b,
        a_marker=PlanWeaveMerge.A_MARKER, b_marker=PlanWeaveMerge.B_MARKER):
        plan = versionedfile.plan_merge(ver_a, ver_b)
        PlanWeaveMerge.__init__(self, plan, a_marker, b_marker)


class VirtualVersionedFiles(VersionedFiles):
    """Dummy implementation for VersionedFiles that uses other functions for
    obtaining fulltexts and parent maps.

    This is always on the bottom of the stack and uses string keys
    (rather than tuples) internally.
    """

    def __init__(self, get_parent_map, get_lines):
        """Create a VirtualVersionedFiles.

        :param get_parent_map: Same signature as Repository.get_parent_map.
        :param get_lines: Should return lines for specified key or None if
                          not available.
        """
        super(VirtualVersionedFiles, self).__init__()
        self._get_parent_map = get_parent_map
        self._get_lines = get_lines

    def check(self, progressbar=None):
        """See VersionedFiles.check.

        :note: Always returns True for VirtualVersionedFiles.
        """
        return True

    def add_mpdiffs(self, records):
        """See VersionedFiles.add_mpdiffs.

        :note: Not implemented for VirtualVersionedFiles.
        """
        raise NotImplementedError(self.add_mpdiffs)

    def get_parent_map(self, keys):
        """See VersionedFiles.get_parent_map."""
        return dict([((k,), tuple([(p,) for p in v]))
            for k, v in self._get_parent_map([k for (k,) in keys]).iteritems()])

    def get_sha1s(self, keys):
        """See VersionedFiles.get_sha1s."""
        ret = {}
        for (k,) in keys:
            lines = self._get_lines(k)
            if lines is not None:
                if not isinstance(lines, list):
                    raise AssertionError
                ret[(k,)] = osutils.sha_strings(lines)
        return ret

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream."""
        for (k,) in list(keys):
            lines = self._get_lines(k)
            if lines is not None:
                if not isinstance(lines, list):
                    raise AssertionError
                yield ChunkedContentFactory((k,), None,
                        sha1=osutils.sha_strings(lines),
                        chunks=lines)
            else:
                yield AbsentContentFactory((k,))

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """See VersionedFile.iter_lines_added_or_present_in_versions()."""
        for i, (key,) in enumerate(keys):
            if pb is not None:
                pb.update("Finding changed lines", i, len(keys))
            for l in self._get_lines(key):
                yield (l, key)
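

# A minimal sketch (not part of the API) of wiring VirtualVersionedFiles up
# to plain in-memory dicts. The dicts and revision ids are illustrative
# stand-ins for whatever callables a real caller would supply.
def _example_virtual_versioned_files():
    texts = {'rev-1': ['one\n'], 'rev-2': ['one\n', 'two\n']}
    parents = {'rev-1': (), 'rev-2': ('rev-1',)}

    def get_parent_map(revision_ids):
        # Same shape as Repository.get_parent_map: string keys in, a dict of
        # string key -> tuple of parent strings out.
        return dict((r, parents[r]) for r in revision_ids if r in parents)

    vf = VirtualVersionedFiles(get_parent_map, texts.get)
    # Tuple keys on the outside, string keys against the backing functions.
    return vf.get_parent_map([('rev-2',)]), vf.get_sha1s([('rev-1',)])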


def network_bytes_to_kind_and_offset(network_bytes):
    """Strip off a record kind from the front of network_bytes.

    :param network_bytes: The bytes of a record.
    :return: A tuple (storage_kind, offset_of_remaining_bytes)
    """
    line_end = network_bytes.find('\n')
    storage_kind = network_bytes[:line_end]
    return storage_kind, line_end + 1
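

# A tiny sketch (not part of the API) of the framing handled above: the
# storage kind is the first line of a serialised record and the payload is
# everything after that newline. The sample bytes are made up.
def _example_kind_and_offset():
    sample = 'fulltext\n<serialised record body>'
    kind, offset = network_bytes_to_kind_and_offset(sample)
    # kind == 'fulltext' and sample[offset:] is the record body.
    return kind, sample[offset:]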


class NetworkRecordStream(object):
    """A record_stream which reconstitutes a serialised stream."""

    def __init__(self, bytes_iterator):
        """Create a NetworkRecordStream.

        :param bytes_iterator: An iterator of bytes. Each item in this
            iterator should have been obtained from a record_streams'
            record.get_bytes_as(record.storage_kind) call.
        """
        self._bytes_iterator = bytes_iterator
        self._kind_factory = {
            'fulltext': fulltext_network_to_record,
            'groupcompress-block': groupcompress.network_block_to_records,
            'inventory-delta': inventory_delta_network_to_record,
            'knit-ft-gz': knit.knit_network_to_record,
            'knit-delta-gz': knit.knit_network_to_record,
            'knit-annotated-ft-gz': knit.knit_network_to_record,
            'knit-annotated-delta-gz': knit.knit_network_to_record,
            'knit-delta-closure': knit.knit_delta_closure_to_records,
            }

    def read(self):
        """Read the stream.

        :return: An iterator as per VersionedFiles.get_record_stream().
        """
        for bytes in self._bytes_iterator:
            storage_kind, line_end = network_bytes_to_kind_and_offset(bytes)
            for record in self._kind_factory[storage_kind](
                storage_kind, bytes, line_end):
                yield record
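

# A short sketch (not part of the API) of reconstituting records on the
# receiving side: each item fed to NetworkRecordStream is the serialised
# form of one record, and read() yields ContentFactory objects again. The
# record used here is made up for illustration.
def _example_network_record_stream():
    serialised = [record_to_fulltext_bytes(
        FulltextContentFactory(('rev-1',), None, None, 'text\n'))]
    return [record.key for record in NetworkRecordStream(serialised).read()]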


def fulltext_network_to_record(kind, bytes, line_end):
    """Convert a network fulltext record to record."""
    meta_len, = struct.unpack('!L', bytes[line_end:line_end+4])
    record_meta = bytes[line_end+4:line_end+4+meta_len]
    key, parents = bencode.bdecode_as_tuple(record_meta)
    if parents == 'nil':
        parents = None
    fulltext = bytes[line_end+4+meta_len:]
    return [FulltextContentFactory(key, parents, None, fulltext)]


def inventory_delta_network_to_record(kind, bytes, line_end):
    """Convert a network inventory-delta record to record."""
    meta_len, = struct.unpack('!L', bytes[line_end:line_end+4])
    record_meta = bytes[line_end+4:line_end+4+meta_len]
    key, parents = bencode.bdecode_as_tuple(record_meta)
    if parents == 'nil':
        parents = None
    inventory_delta_bytes = bytes[line_end+4+meta_len:]
    deserialiser = inventory_delta.InventoryDeltaSerializer()
    parse_result = deserialiser.parse_text_bytes(inventory_delta_bytes)
    basis_id, new_id, rich_root, tree_refs, delta = parse_result
    return [InventoryDeltaContentFactory(
        key, parents, None, delta, basis_id, (rich_root, tree_refs))]


def _length_prefix(bytes):
    return struct.pack('!L', len(bytes))


def record_to_fulltext_bytes(record):
    if record.parents is None:
        parents = 'nil'
    else:
        parents = record.parents
    record_meta = bencode.bencode((record.key, parents))
    record_content = record.get_bytes_as('fulltext')
    return "fulltext\n%s%s%s" % (
        _length_prefix(record_meta), record_meta, record_content)
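

# A minimal sketch (not part of the API) of a round trip through the fulltext
# network helpers above: serialise one record, then parse it back. The key
# and text used here are made up for illustration.
def _example_fulltext_round_trip():
    record = FulltextContentFactory(('file-id', 'rev-1'), None, None,
        'some text\n')
    wire_bytes = record_to_fulltext_bytes(record)
    kind, offset = network_bytes_to_kind_and_offset(wire_bytes)
    # kind == 'fulltext'; the parser hands back a list with one record whose
    # key, parents and fulltext match what was serialised.
    parsed, = fulltext_network_to_record(kind, wire_bytes, offset)
    return parsed.key, parsed.get_bytes_as('fulltext')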


def record_to_inventory_delta_bytes(record):
    record_content = record.get_bytes_as('inventory-delta-bytes')
    if record.parents is None:
        parents = 'nil'
    else:
        parents = record.parents
    record_meta = bencode.bencode((record.key, parents))
    return "inventory-delta\n%s%s%s" % (
        _length_prefix(record_meta), record_meta, record_content)


def sort_groupcompress(parent_map):
    """Sort and group the keys in parent_map into groupcompress order.

    groupcompress is defined (currently) as reverse-topological order, grouped
    by the key prefix.

    :return: A sorted list of keys
    """
    # gc-optimal ordering is approximately reverse topological,
    # properly grouped by file-id.
    per_prefix_map = {}
    for item in parent_map.iteritems():
        key = item[0]
        if isinstance(key, str) or len(key) == 1:
            prefix = ''
        else:
            prefix = key[0]
        try:
            per_prefix_map[prefix].append(item)
        except KeyError:
            per_prefix_map[prefix] = [item]

    present_keys = []
    for prefix in sorted(per_prefix_map):
        present_keys.extend(reversed(tsort.topo_sort(per_prefix_map[prefix])))
    return present_keys
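

# A minimal sketch (not part of the API) of the ordering produced above:
# keys are grouped by their prefix (typically the file id) and emitted in
# reverse topological order within each group. The keys are illustrative.
def _example_sort_groupcompress():
    parent_map = {
        ('f1', 'rev-1'): (),
        ('f1', 'rev-2'): (('f1', 'rev-1'),),
        ('f2', 'rev-1'): (),
    }
    # Expected result: [('f1', 'rev-2'), ('f1', 'rev-1'), ('f2', 'rev-1')]
    return sort_groupcompress(parent_map)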