/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
3830.3.20 by John Arbash Meinel
Minor PEP8 and copyright updates.
1
# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
2
#
3
# Authors:
4
#   Johan Rydberg <jrydberg@gnu.org>
5
#
6
# This program is free software; you can redistribute it and/or modify
7
# it under the terms of the GNU General Public License as published by
8
# the Free Software Foundation; either version 2 of the License, or
9
# (at your option) any later version.
1887.1.1 by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines,
10
#
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
11
# This program is distributed in the hope that it will be useful,
12
# but WITHOUT ANY WARRANTY; without even the implied warranty of
13
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
# GNU General Public License for more details.
1887.1.1 by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines,
15
#
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
16
# You should have received a copy of the GNU General Public License
17
# along with this program; if not, write to the Free Software
4183.7.1 by Sabin Iacob
update FSF mailing address
18
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
19
20
"""Versioned text file storage api."""
21
3350.8.2 by Robert Collins
stacked get_parent_map.
22
from copy import copy
3350.6.1 by Robert Collins
* New ``versionedfile.KeyMapper`` interface to abstract out the access to
23
from cStringIO import StringIO
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
24
import os
4022.1.6 by Robert Collins
Cherrypick and polish the RemoteSink for streaming push.
25
import struct
3350.6.1 by Robert Collins
* New ``versionedfile.KeyMapper`` interface to abstract out the access to
26
from zlib import adler32
27
1996.3.7 by John Arbash Meinel
lazy import versionedfile, late-load bzrlib.merge
28
from bzrlib.lazy_import import lazy_import
29
lazy_import(globals(), """
3224.5.20 by Andrew Bennetts
Remove or lazyify a couple more imports.
30
import urllib
1996.3.7 by John Arbash Meinel
lazy import versionedfile, late-load bzrlib.merge
31
32
from bzrlib import (
33
    errors,
3735.32.18 by John Arbash Meinel
We now support generating a network stream.
34
    groupcompress,
3830.3.12 by Martin Pool
Review cleanups: unify has_key impls, add missing_keys(), clean up exception blocks
35
    index,
4476.3.15 by Andrew Bennetts
Partially working fallback for pre-1.17 servers.
36
    inventory,
4476.3.1 by Andrew Bennetts
Initial hacking to use inventory deltas for cross-format fetch.
37
    inventory_delta,
4005.3.2 by Robert Collins
First passing NetworkRecordStream test - a fulltext from any record type which isn't a chunked or fulltext can be serialised and deserialised successfully.
38
    knit,
2249.5.12 by John Arbash Meinel
Change the APIs for VersionedFile, Store, and some of Repository into utf-8
39
    osutils,
2520.4.3 by Aaron Bentley
Implement plain strategy for extracting and installing multiparent diffs
40
    multiparent,
1996.3.7 by John Arbash Meinel
lazy import versionedfile, late-load bzrlib.merge
41
    tsort,
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
42
    revision,
1996.3.7 by John Arbash Meinel
lazy import versionedfile, late-load bzrlib.merge
43
    ui,
44
    )
4379.3.3 by Gary van der Merwe
Rename and add doc string for StackedParentsProvider.
45
from bzrlib.graph import DictParentsProvider, Graph, StackedParentsProvider
1996.3.7 by John Arbash Meinel
lazy import versionedfile, late-load bzrlib.merge
46
from bzrlib.transport.memory import MemoryTransport
47
""")
1563.2.12 by Robert Collins
Checkpointing: created InterObject to factor out common inter object worker code, added InterVersionedFile and tests to allow making join work between any versionedfile.
48
from bzrlib.inter import InterObject
3350.3.7 by Robert Collins
Create a registry of versioned file record adapters.
49
from bzrlib.registry import Registry
3287.5.2 by Robert Collins
Deprecate VersionedFile.get_parents, breaking pulling from a ghost containing knit or pack repository to weaves, which improves correctness and allows simplification of core code.
50
from bzrlib.symbol_versioning import *
1551.6.7 by Aaron Bentley
Implemented two-way merge, refactored weave merge
51
from bzrlib.textmerge import TextMerge
2694.5.4 by Jelmer Vernooij
Move bzrlib.util.bencode to bzrlib._bencode_py.
52
from bzrlib import bencode
1563.2.11 by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.
53
54
3350.3.7 by Robert Collins
Create a registry of versioned file record adapters.
55
# Registry of record adapters, keyed by (source storage kind, target
# storage kind).  All current adapters live in bzrlib.knit and are
# registered lazily so knit is only imported when an adapter is used.
adapter_registry = Registry()
for _source_kind, _target_kind, _adapter_name in [
    ('knit-delta-gz', 'fulltext', 'DeltaPlainToFullText'),
    ('knit-ft-gz', 'fulltext', 'FTPlainToFullText'),
    ('knit-annotated-delta-gz', 'knit-delta-gz',
        'DeltaAnnotatedToUnannotated'),
    ('knit-annotated-delta-gz', 'fulltext', 'DeltaAnnotatedToFullText'),
    ('knit-annotated-ft-gz', 'knit-ft-gz', 'FTAnnotatedToUnannotated'),
    ('knit-annotated-ft-gz', 'fulltext', 'FTAnnotatedToFullText'),
    ]:
    adapter_registry.register_lazy((_source_kind, _target_kind),
        'bzrlib.knit', _adapter_name)
del _source_kind, _target_kind, _adapter_name
# adapter_registry.register_lazy(('knit-annotated-ft-gz', 'chunked'),
#     'bzrlib.knit', 'FTAnnotatedToChunked')
3350.3.7 by Robert Collins
Create a registry of versioned file record adapters.
70
71
3350.3.3 by Robert Collins
Functional get_record_stream interface tests covering full interface.
72
class ContentFactory(object):
    """Abstract interface for insertion and retrieval from a VersionedFile.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. One of
        'mpdiff', 'knit-annotated-ft', 'knit-annotated-delta', 'knit-ft',
        'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
        'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self):
        """Create a ContentFactory with every attribute unset (None)."""
        # Concrete subclasses are responsible for populating these.
        self.sha1 = self.storage_kind = self.key = self.parents = None
93
94
3890.2.1 by John Arbash Meinel
Start working on a ChunkedContentFactory.
95
class ChunkedContentFactory(ContentFactory):
    """Static data content factory backed by a list of byte chunks.

    The only requirement on 'chunked' is that ''.join(chunks) becomes a
    valid fulltext.  A tuple of a single string satisfies this, as does a
    list of lines.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. Always
        'chunked'
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
     """

    def __init__(self, key, parents, sha1, chunks):
        """Create a ChunkedContentFactory for key holding chunks."""
        self.sha1 = sha1
        self.storage_kind = 'chunked'
        self.key = key
        self.parents = parents
        self._chunks = chunks

    def get_bytes_as(self, storage_kind):
        """Return the content in storage_kind form, or raise if unsupported."""
        if storage_kind == 'fulltext':
            return ''.join(self._chunks)
        if storage_kind == 'chunked':
            return self._chunks
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)
127
128
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
129
class FulltextContentFactory(ContentFactory):
    """Static data content factory backed by a single fulltext string.

    This takes a fulltext when created and just returns that during
    get_bytes_as('fulltext').

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. Always
        'fulltext'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
     """

    def __init__(self, key, parents, sha1, text):
        """Create a FulltextContentFactory for key holding text."""
        self.sha1 = sha1
        self.storage_kind = 'fulltext'
        self.key = key
        self.parents = parents
        self._text = text

    def get_bytes_as(self, storage_kind):
        """Return the content in storage_kind form, or raise if unsupported."""
        if storage_kind == self.storage_kind:
            return self._text
        if storage_kind == 'chunked':
            # A list (not a tuple) so that callers may assign to elements.
            return [self._text]
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)
160
161
4476.3.1 by Andrew Bennetts
Initial hacking to use inventory deltas for cross-format fetch.
162
class InventoryDeltaContentFactory(ContentFactory):
    """Content factory carrying an inventory delta against a basis.

    :ivar sha1: None, or the sha1 supplied at construction time.
    :ivar storage_kind: Always 'inventory-delta'.
    :ivar key: The key of this content.
    :ivar parents: A tuple of parent keys for self.key, or None.
    """

    def __init__(self, key, parents, sha1, delta, basis_id, format_flags,
            repo=None):
        """Create an InventoryDeltaContentFactory.

        :param delta: The inventory delta from basis_id to key.
        :param basis_id: The revision id of the delta's basis inventory.
        :param format_flags: Flags passed to
            InventoryDeltaSerializer.require_flags when serialising.
        :param repo: Optional repository; required only to serve the
            'inventory-delta-bytes-from-null' representation.
        """
        self.sha1 = sha1
        self.storage_kind = 'inventory-delta'
        self.key = key
        self.parents = parents
        self._delta = delta
        self._basis_id = basis_id
        self._format_flags = format_flags
        self._repo = repo

    def get_bytes_as(self, storage_kind):
        """Return the delta in storage_kind form, or raise if unsupported."""
        if storage_kind == self.storage_kind:
            return self._basis_id, self.key, self._delta, self._format_flags
        elif storage_kind == 'inventory-delta-bytes':
            serializer = inventory_delta.InventoryDeltaSerializer()
            serializer.require_flags(*self._format_flags)
            return ''.join(serializer.delta_to_lines(
                self._basis_id, self.key, self._delta))
        elif storage_kind == 'inventory-delta-bytes-from-null':
            if self._repo is None:
                raise errors.UnavailableRepresentation(self.key, storage_kind,
                    self.storage_kind)
            null_inv = inventory.Inventory(None)
            my_inv = self._repo.get_inventory(self.key) # XXX: key[0] ???
            delta = my_inv._make_delta(null_inv)
            # Bug fix: this branch previously used 'serializer' without ever
            # creating one, so it always died with NameError.  Build and
            # configure the serializer here, mirroring the branch above.
            serializer = inventory_delta.InventoryDeltaSerializer()
            serializer.require_flags(*self._format_flags)
            # Joined to a single byte string for consistency with the
            # 'inventory-delta-bytes' representation above (the old code
            # returned the raw line list, but was unreachable anyway).
            return ''.join(serializer.delta_to_lines(
                revision.NULL_REVISION, self.key, delta))
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)
195
196
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
197
class AbsentContentFactory(ContentFactory):
    """A placeholder content factory for unavailable texts.

    :ivar sha1: None.
    :ivar storage_kind: 'absent'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: None.
    """

    def __init__(self, key):
        """Record key as absent; no content or parent data is carried."""
        self.key = key
        self.sha1 = None
        self.parents = None
        self.storage_kind = 'absent'
213
214
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
215
class AdapterFactory(ContentFactory):
    """A content factory to adapt between key prefix's.

    'key' and 'parents' are served from this object; every other
    attribute lookup is delegated to the adapted factory.
    """

    def __init__(self, key, parents, adapted):
        """Create an adapter factory instance."""
        self.key = key
        self.parents = parents
        self._adapted = adapted

    def __getattr__(self, attr):
        """Return a member from the adapted object."""
        if attr not in ('key', 'parents'):
            return getattr(self._adapted, attr)
        return self.__dict__[attr]
230
231
3350.3.14 by Robert Collins
Deprecate VersionedFile.join.
232
def filter_absent(record_stream):
    """Adapt a record stream to remove absent records."""
    return (record for record in record_stream
            if record.storage_kind != 'absent')
237
238
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
239
class VersionedFile(object):
    """Versioned text file storage.

    A versioned file manages versions of line-based text files,
    keeping track of the originating version for each line.

    To clients the "lines" of the file are represented as a list of
    strings. These strings will typically have terminal newline
    characters, but this is not required.  In particular files commonly
    do not have a newline at the end of the file.

    Texts are identified by a version-id string.
    """
252
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
253
    @staticmethod
    def check_not_reserved_id(version_id):
        """Raise an error if version_id is a reserved revision id.

        Delegates to revision.check_not_reserved_id.
        """
        revision.check_not_reserved_id(version_id)
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
256
1563.2.15 by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages.
257
    def copy_to(self, name, transport):
        """Copy this versioned file to name on transport.

        :param name: The name to copy to on transport.
        :param transport: The transport to copy to.
        :raises NotImplementedError: Always; subclasses must implement.
        """
        raise NotImplementedError(self.copy_to)
1863.1.1 by John Arbash Meinel
Allow Versioned files to do caching if explicitly asked, and implement for Knit
260
3350.3.3 by Robert Collins
Functional get_record_stream interface tests covering full interface.
261
    def get_record_stream(self, versions, ordering, include_delta_closure):
        """Get a stream of records for versions.

        :param versions: The versions to include. Each version is a tuple
            (version,).
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the data content of the
            stream, not in the emitted records). This guarantees that
            'fulltext' can be used successfully on every record.
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        :raises NotImplementedError: Always; subclasses must implement.
        """
        raise NotImplementedError(self.get_record_stream)
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
277
278
    def has_version(self, version_id):
        """Returns whether version is present.

        :param version_id: The version id to look for.
        :raises NotImplementedError: Always; subclasses must implement.
        """
        raise NotImplementedError(self.has_version)
281
3350.3.8 by Robert Collins
Basic stream insertion, no fast path yet for knit to knit.
282
    def insert_record_stream(self, stream):
283
        """Insert a record stream into this versioned file.
284
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
285
        :param stream: A stream of records to insert.
3350.3.8 by Robert Collins
Basic stream insertion, no fast path yet for knit to knit.
286
        :return: None
287
        :seealso VersionedFile.get_record_stream:
288
        """
289
        raise NotImplementedError
290
2520.4.140 by Aaron Bentley
Use matching blocks from mpdiff for knit delta creation
291
    def add_lines(self, version_id, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a single text on top of the versioned file.

        Must raise RevisionAlreadyPresent if the new version is
        already present in file history.

        Must raise RevisionNotPresent if any of the given parents are
        not present in file history.

        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminated \n. If the lines list does not meet this constraint the
            add routine may error or may succeed - but you will be unable to
            read the data back accurately. (Checking the lines have been split
            correctly is expensive and extremely unlikely to catch bugs so it
            is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations.  VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent.  The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
                 representation of the inserted version which can be provided
                 back to future add_lines calls in the parent_texts dictionary.
        """
        # Guard against writes on a finished/read-only file, then delegate
        # to the subclass-specific implementation.
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)
1594.2.21 by Robert Collins
Teach versioned files to prevent mutation after finishing.
334
2520.4.140 by Aaron Bentley
Use matching blocks from mpdiff for knit delta creation
335
    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Helper to do the class specific add_lines."""
        # Deliberately names the public entry point in the error.
        raise NotImplementedError(self.add_lines)
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
339
1596.2.32 by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.
340
    def add_lines_with_ghosts(self, version_id, parents, lines,
        parent_texts=None, nostore_sha=None, random_id=False,
        check_content=True, left_matching_blocks=None):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines and returns the same.
        """
        # Guard against writes on a finished/read-only file, then delegate
        # to the subclass-specific implementation.
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
            parent_texts, nostore_sha, random_id, check_content, left_matching_blocks)
1594.2.21 by Robert Collins
Teach versioned files to prevent mutation after finishing.
350
2794.1.1 by Robert Collins
Allow knits to be instructed not to add a text based on a sha, for commit.
351
    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts,
        nostore_sha, random_id, check_content, left_matching_blocks):
        """Helper to do class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)
355
1563.2.19 by Robert Collins
stub out a check for knits.
356
    def check(self, progress_bar=None):
        """Check the versioned file for integrity.

        :param progress_bar: Optional progress reporting object.
        :raises NotImplementedError: Always; subclasses must implement.
        """
        raise NotImplementedError(self.check)
359
1666.1.6 by Robert Collins
Make knit the default format.
360
    def _check_lines_not_unicode(self, lines):
361
        """Check that lines being added to a versioned file are not unicode."""
362
        for line in lines:
363
            if line.__class__ is not str:
364
                raise errors.BzrBadParameterUnicode("lines")
365
366
    def _check_lines_are_lines(self, lines):
367
        """Check that the lines really are full lines without inline EOL."""
368
        for line in lines:
369
            if '\n' in line[:-1]:
370
                raise errors.BzrBadParameterContainsNewline("lines")
371
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
372
    def get_format_signature(self):
        """Get a text description of the data encoding in this file.

        :since: 0.90
        :raises NotImplementedError: Always; subclasses must implement.
        """
        raise NotImplementedError(self.get_format_signature)
378
2520.4.41 by Aaron Bentley
Accelerate mpdiff generation
379
    def make_mpdiffs(self, version_ids):
        """Create multiparent diffs for specified versions.

        :param version_ids: The versions to produce diffs for.
        :return: A list of multiparent.MultiParent diffs, one per requested
            version, in the same order as version_ids.
        :raises errors.RevisionNotPresent: If a requested version is not
            present in this file.
        """
        # Collect the requested versions plus all of their parents, so every
        # text needed for diffing can be fetched in one pass.
        knit_versions = set()
        knit_versions.update(version_ids)
        parent_map = self.get_parent_map(version_ids)
        for version_id in version_ids:
            try:
                knit_versions.update(parent_map[version_id])
            except KeyError:
                raise errors.RevisionNotPresent(version_id, self)
        # We need to filter out ghosts, because we can't diff against them.
        knit_versions = set(self.get_parent_map(knit_versions).keys())
        lines = dict(zip(knit_versions,
            self._get_lf_split_line_list(knit_versions)))
        diffs = []
        for version_id in version_ids:
            target = lines[version_id]
            try:
                parents = [lines[p] for p in parent_map[version_id] if p in
                    knit_versions]
            except KeyError:
                # I don't know how this could ever trigger.
                # parent_map[version_id] was already triggered in the previous
                # for loop, and lines[p] has the 'if p in knit_versions' check,
                # so we again won't have a KeyError.
                raise errors.RevisionNotPresent(version_id, self)
            if len(parents) > 0:
                # Hint the diff with matching blocks against the left-hand
                # (first) parent, when the implementation can supply them.
                left_parent_blocks = self._extract_blocks(version_id,
                                                          parents[0], target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target, parents,
                         left_parent_blocks))
        return diffs
413
2520.4.48 by Aaron Bentley
Support getting blocks from knit deltas with no final EOL
414
    def _extract_blocks(self, version_id, source, target):
        """Return precomputed matching blocks between source and target.

        The base implementation has no precomputed information and returns
        None; make_mpdiffs then lets MultiParent compute the blocks itself.
        Appears intended as a hook for subclasses with stored deltas --
        TODO confirm against knit implementations.
        """
        return None
2520.4.3 by Aaron Bentley
Implement plain strategy for extracting and installing multiparent diffs
416
2520.4.61 by Aaron Bentley
Do bulk insertion of records
417
    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.

        :param records: An iterable of (version, parent_ids, expected_sha1,
            mpdiff) tuples.  NOTE: it is iterated three times below, so it
            must be a re-iterable sequence, not a one-shot generator.
        :raises errors.VersionedFileInvalidChecksum: If a text reconstructed
            from its mpdiff does not hash to expected_sha1.
        """
        # Does this need to call self._check_write_ok()? (IanC 20070919)
        vf_parents = {}
        # Build an in-memory multi-parent store of all supplied diffs so
        # texts can be reconstructed in one batch.
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        # Parents referenced by the diffs but not part of this batch must be
        # pulled from self before the texts can be extracted.
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        # Absent parents (ghosts) are filtered out here.
        present_parents = set(self.get_parent_map(needed_parents).keys())
        # Both zip operands iterate the same unmutated set object, which
        # yields a consistent ordering, so the pairing is correct.
        for parent_id, lines in zip(present_parents,
                                 self._get_lf_split_line_list(present_parents)):
            mpvf.add_version(lines, parent_id, [])
        # Insert each reconstructed text, reusing the mpdiff's matching
        # blocks against the left parent to avoid recomputing a delta.
        for (version, parent_ids, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_ids) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_ids[0]).num_lines()))
            else:
                left_matching_blocks = None
            try:
                _, _, version_text = self.add_lines_with_ghosts(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            except NotImplementedError:
                # The vf can't handle ghosts, so add lines normally, which will
                # (reasonably) fail if there are ghosts in the data.
                _, _, version_text = self.add_lines(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            vf_parents[version] = version_text
        # Verify everything that was inserted against the expected sha1s.
        sha1s = self.get_sha1s(versions)
        for version, parent_ids, expected_sha1, mpdiff in records:
            if expected_sha1 != sha1s[version]:
                raise errors.VersionedFileInvalidChecksum(version)
2520.4.3 by Aaron Bentley
Implement plain strategy for extracting and installing multiparent diffs
460
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
461
    def get_text(self, version_id):
        """Return version contents as a text string.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        lines = self.get_lines(version_id)
        return ''.join(lines)
    # Historical alias for get_text.
    get_string = get_text
469
1756.2.1 by Aaron Bentley
Implement get_texts
470
    def get_texts(self, version_ids):
        """Return the texts of listed versions as a list of strings.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        all_lines = (self.get_lines(version_id) for version_id in version_ids)
        return [''.join(lines) for lines in all_lines]
477
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
478
    def get_lines(self, version_id):
        """Return version contents as a sequence of lines.

        :param version_id: The version to read.
        :raises RevisionNotPresent: If the version is not present in
            file history.
        """
        raise NotImplementedError(self.get_lines)
485
2520.4.90 by Aaron Bentley
Handle \r terminated lines in Weaves properly
486
    def _get_lf_split_line_list(self, version_ids):
        """Return each version's text split into lines on '\\n' only.

        StringIO.readlines splits only on '\\n' (keeping terminators),
        unlike str.splitlines which also honours '\\r' -- that distinction
        matters for weave texts with '\\r'-terminated lines.
        """
        texts = self.get_texts(version_ids)
        return [StringIO(text).readlines() for text in texts]
2520.4.3 by Aaron Bentley
Implement plain strategy for extracting and installing multiparent diffs
488
2530.1.1 by Aaron Bentley
Make topological sorting optional for get_ancestry
489
    def get_ancestry(self, version_ids, topo_sorted=True):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        This list will not be topologically sorted if topo_sorted=False is
        passed.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history."""
        # Normalise a single version id to a list before delegating to the
        # subclass implementation.
        if isinstance(version_ids, basestring):
            version_ids = [version_ids]
        raise NotImplementedError(self.get_ancestry)
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
501
1594.2.8 by Robert Collins
add ghost aware apis to knits.
502
    def get_ancestry_with_ghosts(self, version_ids):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history.

        Ghosts that are known about will be included in ancestry list,
        but are not explicitly marked.
        """
        raise NotImplementedError(self.get_ancestry_with_ghosts)
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
513
3287.5.1 by Robert Collins
Add VersionedFile.get_parent_map.
514
    def get_parent_map(self, version_ids):
        """Get a map of the parents of version_ids.

        :param version_ids: The version ids to look up parents for.
        :return: A mapping from version id to parents.  Version ids absent
            from the store are omitted from the result rather than raising.
        """
        raise NotImplementedError(self.get_parent_map)
521
1594.2.8 by Robert Collins
add ghost aware apis to knits.
522
    def get_parents_with_ghosts(self, version_id):
        """Return version names for parents of version_id.

        Will raise RevisionNotPresent if version_id is not present
        in the history.

        Ghosts that are known about will be included in the parent list,
        but are not explicitly marked.
        """
        parent_map = self.get_parent_map([version_id])
        try:
            parents = parent_map[version_id]
        except KeyError:
            # get_parent_map omits absent versions rather than raising.
            raise errors.RevisionNotPresent(version_id, self)
        return list(parents)
1594.2.8 by Robert Collins
add ghost aware apis to knits.
535
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
536
    def annotate(self, version_id):
        """Return a list of (version-id, line) tuples for version_id.

        :param version_id: The version to annotate.
        :raise RevisionNotPresent: If the given version is
        not present in file history.
        """
        raise NotImplementedError(self.annotate)
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
543
2975.3.2 by Robert Collins
Review feedback - document the API change and improve readability in pack's _do_copy_nodes.
544
    def iter_lines_added_or_present_in_versions(self, version_ids=None,
                                                pb=None):
        """Iterate over the lines in the versioned file from version_ids.

        This may return lines from other versions. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES: Lines are normalised: they will all have \n terminators.
               Lines are returned in arbitrary order.

        :param version_ids: The versions whose lines to yield.
        :param pb: Optional progress bar.
        :return: An iterator over (line, version_id).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_versions)
565
1551.6.15 by Aaron Bentley
Moved plan_merge into Weave
566
    def plan_merge(self, ver_a, ver_b):
        """Return pseudo-annotation indicating how the two versions merge.

        This is computed between versions a and b and their common
        base.

        Weave lines present in none of them are skipped entirely.

        Legend:
        killed-base Dead in base revision
        killed-both Killed in each revision
        killed-a    Killed in a
        killed-b    Killed in b
        unchanged   Alive in both a and b (possibly created in both)
        new-a       Created in a
        new-b       Created in b
        ghost-a     Killed in a, unborn in b
        ghost-b     Killed in b, unborn in a
        irrelevant  Not in either revision
        """
        raise NotImplementedError(VersionedFile.plan_merge)
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
587
1996.3.7 by John Arbash Meinel
lazy import versionedfile, late-load bzrlib.merge
588
    def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
                    b_marker=TextMerge.B_MARKER):
        """Merge lines according to a plan produced by plan_merge.

        Returns the first element of PlanWeaveMerge.merge_lines(); the
        markers are passed through to mark the two sides of conflicts.
        """
        merger = PlanWeaveMerge(plan, a_marker, b_marker)
        return merger.merge_lines()[0]
1551.6.10 by Aaron Bentley
Renamed WeaveMerge to PlanMerge, added plan method, created planless WeaveMerge
591
1664.2.7 by Aaron Bentley
Merge bzr.dev
592
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
593
class RecordingVersionedFilesDecorator(object):
    """A minimal versioned files decorator that records calls made on it.

    Only enough methods have been added to support tests using it to date.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf):
        """Create a RecordingVersionedFilesDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        """
        self._backing_vf = backing_vf
        self.calls = []

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        # Record the full argument list, then delegate unchanged.
        record = ("add_lines", key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)
        self.calls.append(record)
        return self._backing_vf.add_lines(key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

    def check(self):
        # Deliberately not recorded in self.calls.
        self._backing_vf.check()

    def get_parent_map(self, keys):
        # A copy of keys is stored so later caller mutation cannot alter
        # the recorded value.
        self.calls.append(("get_parent_map", copy(keys)))
        return self._backing_vf.get_parent_map(keys)

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        record = ("get_record_stream", list(keys), sort_order,
            include_delta_closure)
        self.calls.append(record)
        return self._backing_vf.get_record_stream(keys, sort_order,
            include_delta_closure)

    def get_sha1s(self, keys):
        self.calls.append(("get_sha1s", copy(keys)))
        return self._backing_vf.get_sha1s(keys)

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        self.calls.append(("iter_lines_added_or_present_in_keys", copy(keys)))
        return self._backing_vf.iter_lines_added_or_present_in_keys(keys, pb=pb)

    def keys(self):
        self.calls.append(("keys",))
        return self._backing_vf.keys()
642
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
643
3871.4.1 by John Arbash Meinel
Add a VFDecorator that can yield records in a specified order
644
class OrderingVersionedFilesDecorator(RecordingVersionedFilesDecorator):
    """A VF that records calls, and returns keys in specific order.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf, key_priority):
        """Create a RecordingVersionedFilesDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        :param key_priority: A dictionary defining what order keys should be
            returned from an 'unordered' get_record_stream request.
            Keys with lower priority are returned first, keys not present in
            the map get an implicit priority of 0, and are returned in
            lexicographical order.
        """
        RecordingVersionedFilesDecorator.__init__(self, backing_vf)
        self._key_priority = key_priority

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        self.calls.append(("get_record_stream", list(keys), sort_order,
            include_delta_closure))
        if sort_order != 'unordered':
            # Any explicit ordering is delegated to the backing vf.
            for record in self._backing_vf.get_record_stream(keys, sort_order,
                            include_delta_closure):
                yield record
            return
        # Impose our own deterministic order -- (priority, key), with an
        # implicit priority of 0 -- by asking the backing_vf for the keys
        # one at a time.
        def _priority(key):
            return (self._key_priority.get(key, 0), key)
        for key in sorted(keys, key=_priority):
            for record in self._backing_vf.get_record_stream([key],
                            'unordered', include_delta_closure):
                yield record
680
681
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
682
class KeyMapper(object):
    """KeyMappers map between keys and underlying partitioned storage.

    Abstract base class: subclasses implement map and unmap.
    """

    def map(self, key):
        """Map key to an underlying storage identifier.

        :param key: A key tuple e.g. ('file-id', 'revision-id').
        :return: An underlying storage identifier, specific to the partitioning
            mechanism.
        """
        raise NotImplementedError(self.map)

    def unmap(self, partition_id):
        """Map a partitioned storage id back to a key prefix.

        :param partition_id: The underlying partition id.
        :return: As much of a key (or prefix) as is derivable from the partition
            id.
        """
        raise NotImplementedError(self.unmap)
702
703
704
class ConstantMapper(KeyMapper):
    """A key mapper that ignores the key and always yields one result."""

    def __init__(self, result):
        """Create a ConstantMapper which will return result for all maps."""
        # The single storage identifier handed back for every key.
        self._result = result

    def map(self, key):
        """See KeyMapper.map()."""
        return self._result
714
715
716
class URLEscapeMapper(KeyMapper):
    """Base class for use with transport backed storage.

    This provides a map and unmap wrapper that respectively url escape and
    unescape their outputs and inputs.  Subclasses supply _map and _unmap.
    """

    def map(self, key):
        """See KeyMapper.map()."""
        raw = self._map(key)
        return urllib.quote(raw)

    def unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        unquoted = urllib.unquote(partition_id)
        return self._unmap(unquoted)
730
731
732
class PrefixMapper(URLEscapeMapper):
    """A key mapper that extracts the first component of a key.

    This mapper is for use with a transport based backend.
    """

    def _map(self, key):
        """See KeyMapper.map()."""
        # The partition id is simply the key's first element.
        return key[0]

    def _unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        # Only a one-element key prefix is recoverable.
        return (partition_id,)
745
746
747
class HashPrefixMapper(URLEscapeMapper):
    """A key mapper that combines the first component of a key with a hash.

    This mapper is for use with a transport based backend.
    """

    def _map(self, key):
        """See KeyMapper.map()."""
        prefix = self._escape(key[0])
        # Bucket into 256 directories by the low byte of the adler32.
        bucket = adler32(prefix) & 0xff
        return "%02x/%s" % (bucket, prefix)

    def _escape(self, prefix):
        """No escaping needed here."""
        return prefix

    def _unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        # Drop the hash directory; only the basename carries key data.
        basename = osutils.basename(partition_id)
        return (self._unescape(basename),)

    def _unescape(self, basename):
        """No unescaping needed for HashPrefixMapper."""
        return basename
769
770
771
class HashEscapedPrefixMapper(HashPrefixMapper):
    """Combines the escaped first component of a key with a hash.

    This mapper is for use with a transport based backend.
    """

    _safe = "abcdefghijklmnopqrstuvwxyz0123456789-_@,."

    def _escape(self, prefix):
        """Turn a key element into a filesystem safe string.

        This is similar to a plain urllib.quote, except
        it uses specific safe characters, so that it doesn't
        have to translate a lot of valid file ids.
        """
        # @ does not get escaped. This is because it is a valid
        # filesystem character we use all the time, and it looks
        # a lot better than seeing %40 all the time.
        escaped = []
        for c in prefix:
            if c in self._safe:
                escaped.append(c)
            else:
                escaped.append('%%%02x' % ord(c))
        return ''.join(escaped)

    def _unescape(self, basename):
        """Escaped names are easily unescaped by urlutils."""
        return urllib.unquote(basename)
796
797
798
def make_versioned_files_factory(versioned_file_factory, mapper):
    """Create a ThunkedVersionedFiles factory.

    This will create a callable which when called creates a
    ThunkedVersionedFiles on a transport, using mapper to access individual
    versioned files, and versioned_file_factory to create each individual file.
    """
    def factory(transport):
        # The final argument is a callable that always returns True --
        # NOTE(review): presumably an is-locked/is-writable check; confirm
        # against ThunkedVersionedFiles' constructor contract.
        return ThunkedVersionedFiles(transport, versioned_file_factory, mapper,
            lambda: True)
    return factory
809
810
811
class VersionedFiles(object):
    """Storage for many versioned files.

    This object allows a single keyspace for accessing the history graph and
    contents of named bytestrings.

    Currently no implementation allows the graph of different key prefixes to
    intersect, but the API does allow such implementations in the future.

    The keyspace is expressed via simple tuples. Any instance of VersionedFiles
    may have a different length key-size, but that size will be constant for
    all texts added to or retrieved from it. For instance, bzrlib uses
    instances with a key-size of 2 for storing user files in a repository, with
    the first element the fileid, and the second the version of that file.

    The use of tuples allows a single code base to support several different
    uses with only the mapping logic changing from instance to instance.
    """

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a text to the store.

        :param key: The key tuple of the text to add. If the last element is
            None, a CHK string will be generated during the addition.
        :param parents: The parents key tuples of the text to add.
        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminating \n. If the lines list does meet this constraint the add
            routine may error or may succeed - but you will be unable to read
            the data back accurately. (Checking the lines have been split
            correctly is expensive and extremely unlikely to catch bugs so it
            is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations.  VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent.  The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
                 representation of the inserted version which can be provided
                 back to future add_lines calls in the parent_texts dictionary.
        """
        raise NotImplementedError(self.add_lines)

    def _add_text(self, key, parents, text, nostore_sha=None, random_id=False):
        """Add a text to the store.

        This is a private function for use by CommitBuilder.

        :param key: The key tuple of the text to add. If the last element is
            None, a CHK string will be generated during the addition.
        :param parents: The parents key tuples of the text to add.
        :param text: A string containing the text to be committed.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :return: The text sha1, the number of bytes in the text, and an opaque
                 representation of the inserted version which can be provided
                 back to future _add_text calls in the parent_texts dictionary.

        Note: content checking is always enabled here (check_content=True is
        passed to add_lines below); there is no parameter to disable it.
        """
        # The default implementation just thunks over to .add_lines(),
        # inefficient, but it works.
        return self.add_lines(key, parents, osutils.split_lines(text),
                              nostore_sha=nostore_sha,
                              random_id=random_id,
                              check_content=True)

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        # Opaque parent-text representations returned by add_lines, passed
        # back in to enable delta optimisations for later records.
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        # First pass: register every diff so the in-memory VF knows the
        # full set of versions being added.
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        # Second pass: find parents that are referenced but not supplied by
        # the record set itself; their fulltexts must be fetched from self.
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        # It seems likely that adding all the present parents as fulltexts can
        # easily exhaust memory.
        chunks_to_lines = osutils.chunks_to_lines
        for record in self.get_record_stream(needed_parents, 'unordered',
            True):
            if record.storage_kind == 'absent':
                continue
            mpvf.add_version(chunks_to_lines(record.get_bytes_as('chunked')),
                record.key, [])
        # Expand each diff to a fulltext and add it, verifying the sha1.
        for (key, parent_keys, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_keys) == 1:
                # Single-parent texts can reuse the diff's matching blocks
                # as a hint to add_lines.
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_keys[0]).num_lines()))
            else:
                left_matching_blocks = None
            version_sha1, _, version_text = self.add_lines(key,
                parent_keys, lines, vf_parents,
                left_matching_blocks=left_matching_blocks)
            if version_sha1 != expected_sha1:
                raise errors.VersionedFileInvalidChecksum(version)
            vf_parents[key] = version_text

    def annotate(self, key):
        """Return a list of (version-key, line) tuples for the text of key.

        :raise RevisionNotPresent: If the key is not present.
        """
        raise NotImplementedError(self.annotate)

    def check(self, progress_bar=None):
        """Check this object for integrity."""
        raise NotImplementedError(self.check)

    @staticmethod
    def check_not_reserved_id(version_id):
        """Check version_id against the reserved-id rules.

        Delegates to bzrlib.revision.check_not_reserved_id; presumably
        raises if version_id uses a reserved name — confirm against that
        helper.
        """
        revision.check_not_reserved_id(version_id)

    def _check_lines_not_unicode(self, lines):
        """Check that lines being added to a versioned file are not unicode."""
        for line in lines:
            if line.__class__ is not str:
                raise errors.BzrBadParameterUnicode("lines")

    def _check_lines_are_lines(self, lines):
        """Check that the lines really are full lines without inline EOL."""
        for line in lines:
            # Only the final character may be a newline.
            if '\n' in line[:-1]:
                raise errors.BzrBadParameterContainsNewline("lines")

    def get_parent_map(self, keys):
        """Get a map of the parents of keys.

        :param keys: The keys to look up parents for.
        :return: A mapping from keys to parents. Absent keys are absent from
            the mapping.
        """
        raise NotImplementedError(self.get_parent_map)

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """Get a stream of records for keys.

        :param keys: The keys to include.
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the opaque data).
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        raise NotImplementedError(self.get_record_stream)

    def get_sha1s(self, keys):
        """Get the sha1's of the texts for the given keys.

        :param keys: The names of the keys to lookup
        :return: a dict from key to sha1 digest. Keys of texts which are not
            present in the store are not present in the returned
            dictionary.
        """
        raise NotImplementedError(self.get_sha1s)

    # Default has_key: derived from get_parent_map by the helper in the
    # index module, so concrete classes only need to implement
    # get_parent_map to get membership testing.
    has_key = index._has_key_from_parent_map

    def get_missing_compression_parent_keys(self):
        """Return an iterable of keys of missing compression parents.

        Check this after calling insert_record_stream to find out if there are
        any missing compression parents.  If there are, the records that
        depend on them are not able to be inserted safely. The precise
        behaviour depends on the concrete VersionedFiles class in use.

        Classes that do not support this will raise NotImplementedError.
        """
        raise NotImplementedError(self.get_missing_compression_parent_keys)

    def insert_record_stream(self, stream):
        """Insert a record stream into this container.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        # NOTE(review): other abstract methods raise
        # NotImplementedError(self.method); this one raises the bare class.
        raise NotImplementedError

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """Iterate over the lines in the versioned files from keys.

        This may return lines from other keys. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES:
         * Lines are normalised by the underlying store: they will all have \n
           terminators.
         * Lines are returned in arbitrary order.

        :return: An iterator over (line, key).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_keys)

    def keys(self):
        """Return an iterable of the keys for all the contained texts."""
        raise NotImplementedError(self.keys)

    def make_mpdiffs(self, keys):
        """Create multiparent diffs for specified keys.

        :param keys: An iterable of keys to diff; the result list is in the
            same order as iteration over keys.
        :return: A list of multiparent.MultiParent objects, one per key.
        :raise RevisionNotPresent: If any requested key is absent.
        """
        # Preserve the caller's ordering before converting to sets.
        keys_order = tuple(keys)
        keys = frozenset(keys)
        knit_keys = set(keys)
        parent_map = self.get_parent_map(keys)
        for parent_keys in parent_map.itervalues():
            if parent_keys:
                knit_keys.update(parent_keys)
        missing_keys = keys - set(parent_map)
        if missing_keys:
            raise errors.RevisionNotPresent(list(missing_keys)[0], self)
        # We need to filter out ghosts, because we can't diff against them.
        maybe_ghosts = knit_keys - keys
        ghosts = maybe_ghosts - set(self.get_parent_map(maybe_ghosts))
        knit_keys.difference_update(ghosts)
        lines = {}
        chunks_to_lines = osutils.chunks_to_lines
        for record in self.get_record_stream(knit_keys, 'topological', True):
            lines[record.key] = chunks_to_lines(record.get_bytes_as('chunked'))
            # line_block_dict = {}
            # for parent, blocks in record.extract_line_blocks():
            #   line_blocks[parent] = blocks
            # line_blocks[record.key] = line_block_dict
        diffs = []
        for key in keys_order:
            target = lines[key]
            parents = parent_map[key] or []
            # Note that filtering knit_keys can lead to a parent difference
            # between the creation and the application of the mpdiff.
            parent_lines = [lines[p] for p in parents if p in knit_keys]
            if len(parent_lines) > 0:
                left_parent_blocks = self._extract_blocks(key, parent_lines[0],
                    target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target,
                parent_lines, left_parent_blocks))
        return diffs

    # Default missing_keys: derived from get_parent_map by the helper in
    # the index module (mirrors the has_key alias above).
    missing_keys = index._missing_keys_from_parent_map

    def _extract_blocks(self, version_id, source, target):
        """Hook for subclasses: matching blocks between source and target.

        The default returns None, meaning no matching-block hint is
        available for make_mpdiffs.
        """
        return None
class ThunkedVersionedFiles(VersionedFiles):
    """Storage for many versioned files thunked onto a 'VersionedFile' class.

    This object allows a single keyspace for accessing the history graph and
    contents of named bytestrings.

    Currently no implementation allows the graph of different key prefixes to
    intersect, but the API does allow such implementations in the future.
    """

    def __init__(self, transport, file_factory, mapper, is_locked):
        """Create a ThunkedVersionedFiles.

        :param transport: The transport the individual files live on.
        :param file_factory: Callable creating a VersionedFile for a path.
        :param mapper: Maps key prefixes to transport paths and back.
        :param is_locked: Callable returning whether the store is locked.
        """
        self._transport = transport
        self._file_factory = file_factory
        self._mapper = mapper
        self._is_locked = is_locked

    def _add_lines_to_vf(self, vf, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Add lines to vf, preferring the ghost-aware API when available.

        Falls back to plain add_lines for VersionedFile implementations
        that do not support add_lines_with_ghosts.
        """
        try:
            return vf.add_lines_with_ghosts(version_id, parents, lines,
                parent_texts=parent_texts,
                left_matching_blocks=left_matching_blocks,
                nostore_sha=nostore_sha, random_id=random_id,
                check_content=check_content)
        except NotImplementedError:
            return vf.add_lines(version_id, parents, lines,
                parent_texts=parent_texts,
                left_matching_blocks=left_matching_blocks,
                nostore_sha=nostore_sha, random_id=random_id,
                check_content=check_content)

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """See VersionedFiles.add_lines()."""
        path = self._mapper.map(key)
        version_id = key[-1]
        # The thunked VersionedFile keys on the last element only; strip
        # the prefix from each parent key.
        parents = [parent[-1] for parent in parents]
        vf = self._get_vf(path)
        try:
            return self._add_lines_to_vf(vf, version_id, parents, lines,
                parent_texts, left_matching_blocks, nostore_sha, random_id,
                check_content)
        except errors.NoSuchFile:
            # parent directory may be missing, try again.
            self._transport.mkdir(osutils.dirname(path))
            return self._add_lines_to_vf(vf, version_id, parents, lines,
                parent_texts, left_matching_blocks, nostore_sha, random_id,
                check_content)

    def annotate(self, key):
        """Return a list of (version-key, line) tuples for the text of key.

        :raise RevisionNotPresent: If the key is not present.
        """
        prefix = key[:-1]
        path = self._mapper.map(prefix)
        vf = self._get_vf(path)
        origins = vf.annotate(key[-1])
        result = []
        for origin, line in origins:
            # Re-attach the prefix so callers see full keys.
            result.append((prefix + (origin,), line))
        return result

    def check(self, progress_bar=None):
        """See VersionedFiles.check()."""
        for prefix, vf in self._iter_all_components():
            vf.check()

    def get_parent_map(self, keys):
        """Get a map of the parents of keys.

        :param keys: The keys to look up parents for.
        :return: A mapping from keys to parents. Absent keys are absent from
            the mapping.
        """
        prefixes = self._partition_keys(keys)
        result = {}
        for prefix, suffixes in prefixes.items():
            path = self._mapper.map(prefix)
            vf = self._get_vf(path)
            parent_map = vf.get_parent_map(suffixes)
            for key, parents in parent_map.items():
                result[prefix + (key,)] = tuple(
                    prefix + (parent,) for parent in parents)
        return result

    def _get_vf(self, path):
        """Return the VersionedFile at path, creating it if necessary.

        :raise ObjectNotLocked: If the store is not locked.
        """
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        return self._file_factory(path, self._transport, create=True,
            get_scope=lambda: None)

    def _partition_keys(self, keys):
        """Turn keys into a dict of prefix:suffix_list."""
        result = {}
        for key in keys:
            prefix_keys = result.setdefault(key[:-1], [])
            prefix_keys.append(key[-1])
        return result

    def _get_all_prefixes(self):
        """Return (path, prefix) pairs for every component store."""
        # Identify all key prefixes.
        # XXX: A bit hacky, needs polish.
        if type(self._mapper) == ConstantMapper:
            paths = [self._mapper.map(())]
            prefixes = [()]
        else:
            relpaths = set()
            for quoted_relpath in self._transport.iter_files_recursive():
                path, ext = os.path.splitext(quoted_relpath)
                relpaths.add(path)
            paths = list(relpaths)
            prefixes = [self._mapper.unmap(path) for path in paths]
        return zip(paths, prefixes)

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream()."""
        # Ordering will be taken care of by each partitioned store; group keys
        # by partition.
        keys = sorted(keys)
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            suffixes = [(suffix,) for suffix in suffixes]
            for record in vf.get_record_stream(suffixes, ordering,
                include_delta_closure):
                # Rewrite the record in place to carry full (prefixed) keys.
                if record.parents is not None:
                    record.parents = tuple(
                        prefix + parent for parent in record.parents)
                record.key = prefix + record.key
                yield record

    def _iter_keys_vf(self, keys):
        """Yield (prefix, suffixes, vf) for each partition of keys."""
        prefixes = self._partition_keys(keys)
        for prefix, suffixes in prefixes.items():
            path = self._mapper.map(prefix)
            vf = self._get_vf(path)
            yield prefix, suffixes, vf

    def get_sha1s(self, keys):
        """See VersionedFiles.get_sha1s()."""
        sha1s = {}
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            vf_sha1s = vf.get_sha1s(suffixes)
            for suffix, sha1 in vf_sha1s.iteritems():
                sha1s[prefix + (suffix,)] = sha1
        return sha1s

    def insert_record_stream(self, stream):
        """Insert a record stream into this container.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        for record in stream:
            prefix = record.key[:-1]
            key = record.key[-1:]
            if record.parents is not None:
                parents = [parent[-1:] for parent in record.parents]
            else:
                parents = None
            thunk_record = AdapterFactory(key, parents, record)
            path = self._mapper.map(prefix)
            # Note that this parses the file many times; we can do better but
            # as this only impacts weaves in terms of performance, it is
            # tolerable.
            vf = self._get_vf(path)
            vf.insert_record_stream([thunk_record])

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """Iterate over the lines in the versioned files from keys.

        This may return lines from other keys. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES:
         * Lines are normalised by the underlying store: they will all have \n
           terminators.
         * Lines are returned in arbitrary order.

        :return: An iterator over (line, key).
        """
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            for line, version in vf.iter_lines_added_or_present_in_versions(suffixes):
                yield line, prefix + (version,)

    def _iter_all_components(self):
        """Yield (prefix, vf) for every component store on disk."""
        for path, prefix in self._get_all_prefixes():
            yield prefix, self._get_vf(path)

    def keys(self):
        """See VersionedFiles.keys()."""
        result = set()
        for prefix, vf in self._iter_all_components():
            for suffix in vf.versions():
                result.add(prefix + (suffix,))
        return result
class _PlanMergeVersionedFile(VersionedFiles):
    """A VersionedFile for uncommitted and committed texts.

    It is intended to allow merges to be planned with working tree texts.
    It implements only the small part of the VersionedFiles interface used by
    PlanMerge.  It falls back to multiple versionedfiles for data not stored in
    _PlanMergeVersionedFile itself.

    :ivar: fallback_versionedfiles a list of VersionedFiles objects that can be
        queried for missing texts.
    """

    def __init__(self, file_id):
        """Create a _PlanMergeVersionedFile.

        :param file_id: Used with _PlanMerge code which is not yet fully
            tuple-keyspace aware.
        """
        self._file_id = file_id
        # fallback locations
        self.fallback_versionedfiles = []
        # Parents for locally held keys.
        self._parents = {}
        # line data for locally held keys.
        self._lines = {}
        # key lookup providers
        self._providers = [DictParentsProvider(self._parents)]

    def plan_merge(self, ver_a, ver_b, base=None):
        """See VersionedFile.plan_merge"""
        # Imported locally to avoid a module-level cycle with bzrlib.merge.
        from bzrlib.merge import _PlanMerge
        if base is None:
            return _PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge()
        # With an explicit base, compute both the a<->base plan and the
        # a<->b plan, then subtract the former from the latter.
        old_plan = list(_PlanMerge(ver_a, base, self, (self._file_id,)).plan_merge())
        new_plan = list(_PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge())
        return _PlanMerge._subtract_plans(old_plan, new_plan)

    def plan_lca_merge(self, ver_a, ver_b, base=None):
        """Like plan_merge, but plans using the LCA-based planner."""
        from bzrlib.merge import _PlanLCAMerge
        graph = Graph(self)
        new_plan = _PlanLCAMerge(ver_a, ver_b, self, (self._file_id,), graph).plan_merge()
        if base is None:
            return new_plan
        old_plan = _PlanLCAMerge(ver_a, base, self, (self._file_id,), graph).plan_merge()
        return _PlanLCAMerge._subtract_plans(list(old_plan), list(new_plan))

    def add_lines(self, key, parents, lines):
        """See VersionedFiles.add_lines

        Lines are added locally, not to fallback versionedfiles.  Also, ghosts
        are permitted.  Only reserved ids are permitted.
        """
        if type(key) is not tuple:
            raise TypeError(key)
        if not revision.is_reserved_id(key[-1]):
            raise ValueError('Only reserved ids may be used')
        if parents is None:
            raise ValueError('Parents may not be None')
        if lines is None:
            raise ValueError('Lines may not be None')
        self._parents[key] = tuple(parents)
        self._lines[key] = lines

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream.

        Locally held texts are yielded first, then each fallback is
        consulted for what remains; keys found nowhere are reported absent.
        """
        pending = set(keys)
        # NOTE(review): `keys` is iterated a second time below, so this
        # assumes a re-iterable collection rather than a one-shot
        # iterator -- confirm against callers.
        for key in keys:
            if key in self._lines:
                lines = self._lines[key]
                parents = self._parents[key]
                pending.remove(key)
                yield ChunkedContentFactory(key, parents, None, lines)
        # NOTE(review): `pending` is mutated while a fallback stream over it
        # is live; this assumes the fallback snapshots its keys argument.
        for versionedfile in self.fallback_versionedfiles:
            for record in versionedfile.get_record_stream(
                pending, 'unordered', True):
                if record.storage_kind == 'absent':
                    continue
                else:
                    pending.remove(record.key)
                    yield record
            if not pending:
                return
        # report absent entries
        for key in pending:
            yield AbsentContentFactory(key)

    def get_parent_map(self, keys):
        """See VersionedFiles.get_parent_map"""
        # We create a new provider because a fallback may have been added.
        # If we make fallbacks private we can update a stack list and avoid
        # object creation thrashing.
        keys = set(keys)
        result = {}
        # NULL_REVISION has no parents; answer it directly instead of
        # asking the providers.
        if revision.NULL_REVISION in keys:
            keys.remove(revision.NULL_REVISION)
            result[revision.NULL_REVISION] = ()
        # Keep the local DictParentsProvider first, followed by the current
        # set of fallbacks.
        self._providers = self._providers[:1] + self.fallback_versionedfiles
        result.update(
            StackedParentsProvider(self._providers).get_parent_map(keys))
        # Rewrite empty parent tuples to (NULL_REVISION,).
        for key, parents in result.iteritems():
            if parents == ():
                result[key] = (revision.NULL_REVISION,)
        return result
3144.3.1 by Aaron Bentley
Implement LCA merge, with problematic conflict markers
1408
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1409
1551.6.10 by Aaron Bentley
Renamed WeaveMerge to PlanMerge, added plan method, created planless WeaveMerge
1410
class PlanWeaveMerge(TextMerge):
    """Weave merge that takes a plan as its input.

    This exists so that VersionedFile.plan_merge is implementable.
    Most callers will want to use WeaveMerge instead.
    """

    def __init__(self, plan, a_marker=TextMerge.A_MARKER,
                 b_marker=TextMerge.B_MARKER):
        """Create a PlanWeaveMerge.

        :param plan: An iterable of (state, line) tuples as produced by a
            plan_merge implementation.
        :param a_marker: Marker passed through to TextMerge for the a side.
        :param b_marker: Marker passed through to TextMerge for the b side.
        """
        TextMerge.__init__(self, a_marker, b_marker)
        self.plan = plan

    def _merge_struct(self):
        """Yield merge structs derived from self.plan.

        Each struct is either a one-tuple of lines (agreed text) or a
        two-tuple (lines_a, lines_b) describing a conflict region.
        """
        lines_a = []
        lines_b = []
        ch_a = ch_b = False

        def outstanding_struct():
            # Flush the queued a/b line groups: one-sided or identical
            # changes yield a single group; anything else is a conflict
            # pair.  Reads the enclosing bindings at call time.
            if not lines_a and not lines_b:
                return
            elif ch_a and not ch_b:
                # one-sided change:
                yield(lines_a,)
            elif ch_b and not ch_a:
                yield (lines_b,)
            elif lines_a == lines_b:
                yield(lines_a,)
            else:
                yield (lines_a, lines_b)

        # We previously considered either 'unchanged' or 'killed-both' lines
        # to be possible places to resynchronize.  However, assuming agreement
        # on killed-both lines may be too aggressive. -- mbp 20060324
        for state, line in self.plan:
            if state == 'unchanged':
                # resync and flush queued conflicts changes if any
                for struct in outstanding_struct():
                    yield struct
                lines_a = []
                lines_b = []
                ch_a = ch_b = False

            if state == 'unchanged':
                if line:
                    yield ([line],)
            elif state == 'killed-a':
                ch_a = True
                lines_b.append(line)
            elif state == 'killed-b':
                ch_b = True
                lines_a.append(line)
            elif state == 'new-a':
                ch_a = True
                lines_a.append(line)
            elif state == 'new-b':
                ch_b = True
                lines_b.append(line)
            elif state == 'conflicted-a':
                ch_b = ch_a = True
                lines_a.append(line)
            elif state == 'conflicted-b':
                ch_b = ch_a = True
                lines_b.append(line)
            elif state == 'killed-both':
                # This counts as a change, even though there is no associated
                # line
                ch_b = ch_a = True
            else:
                # Remaining states carry no text for either side; anything
                # unrecognised is a programming error.
                if state not in ('irrelevant', 'ghost-a', 'ghost-b',
                        'killed-base'):
                    raise AssertionError(state)
        # Flush whatever is still queued when the plan does not end with an
        # unchanged line.
        for struct in outstanding_struct():
            yield struct
1563.2.12 by Robert Collins
Checkpointing: created InterObject to factor out common inter object worker code, added InterVersionedFile and tests to allow making join work between any versionedfile.
1483
1664.2.14 by Aaron Bentley
spacing fix
1484
1551.6.10 by Aaron Bentley
Renamed WeaveMerge to PlanMerge, added plan method, created planless WeaveMerge
1485
class WeaveMerge(PlanWeaveMerge):
    """Weave merge that takes a VersionedFile and two versions as its input."""

    def __init__(self, versionedfile, ver_a, ver_b,
        a_marker=PlanWeaveMerge.A_MARKER, b_marker=PlanWeaveMerge.B_MARKER):
        # Ask the versioned file for a merge plan of the two versions, then
        # delegate the actual merging to the plan-driven base class.
        merge_plan = versionedfile.plan_merge(ver_a, ver_b)
        PlanWeaveMerge.__init__(self, merge_plan, a_marker, b_marker)
1492
1493
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
1494
class VirtualVersionedFiles(VersionedFiles):
    """Dummy implementation for VersionedFiles that uses other functions for
    obtaining fulltexts and parent maps.

    This is always on the bottom of the stack and uses string keys
    (rather than tuples) internally.
    """

    def __init__(self, get_parent_map, get_lines):
        """Create a VirtualVersionedFiles.

        :param get_parent_map: Same signature as Repository.get_parent_map.
        :param get_lines: Should return lines for specified key or None if
                          not available.
        """
        super(VirtualVersionedFiles, self).__init__()
        self._get_parent_map = get_parent_map
        self._get_lines = get_lines

    def check(self, progressbar=None):
        """See VersionedFiles.check.

        :note: Always returns True for VirtualVersionedFiles.
        """
        return True

    def add_mpdiffs(self, records):
        """See VersionedFiles.mpdiffs.

        :note: Not implemented for VirtualVersionedFiles.
        """
        raise NotImplementedError(self.add_mpdiffs)

    def get_parent_map(self, keys):
        """See VersionedFiles.get_parent_map."""
        # Unwrap the 1-tuple keys, query the callback, and re-wrap both the
        # keys and their parents as 1-tuples on the way back out.
        string_keys = [k for (k,) in keys]
        parent_map = {}
        for string_key, parent_ids in self._get_parent_map(
                string_keys).iteritems():
            parent_map[(string_key,)] = tuple([(p,) for p in parent_ids])
        return parent_map

    def get_sha1s(self, keys):
        """See VersionedFiles.get_sha1s."""
        sha1s = {}
        for (string_key,) in keys:
            text_lines = self._get_lines(string_key)
            if text_lines is None:
                # Unavailable texts are simply omitted from the result.
                continue
            if not isinstance(text_lines, list):
                raise AssertionError
            sha1s[(string_key,)] = osutils.sha_strings(text_lines)
        return sha1s

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream."""
        for (string_key,) in list(keys):
            text_lines = self._get_lines(string_key)
            if text_lines is None:
                yield AbsentContentFactory((string_key,))
                continue
            if not isinstance(text_lines, list):
                raise AssertionError
            yield ChunkedContentFactory((string_key,), None,
                    sha1=osutils.sha_strings(text_lines),
                    chunks=text_lines)

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """See VersionedFile.iter_lines_added_or_present_in_versions()."""
        for index, (string_key,) in enumerate(keys):
            if pb is not None:
                pb.update("Finding changed lines", index, len(keys))
            for line in self._get_lines(string_key):
                yield (line, string_key)
4005.3.2 by Robert Collins
First passing NetworkRecordStream test - a fulltext from any record type which isn't a chunked or fulltext can be serialised and deserialised successfully.
1563
1564
1565
def network_bytes_to_kind_and_offset(network_bytes):
    """Strip off the record kind from the front of network_bytes.

    The kind is everything before the first newline.

    :param network_bytes: The bytes of a record.
    :return: A tuple (storage_kind, offset_of_remaining_bytes)
    """
    newline_pos = network_bytes.find('\n')
    return network_bytes[:newline_pos], newline_pos + 1
1574
1575
1576
class NetworkRecordStream(object):
    """A record_stream which reconstitures a serialised stream."""

    def __init__(self, bytes_iterator):
        """Create a NetworkRecordStream.

        :param bytes_iterator: An iterator of bytes. Each item in this
            iterator should have been obtained from a record_streams'
            record.get_bytes_as(record.storage_kind) call.
        """
        self._bytes_iterator = bytes_iterator
        # Map each wire storage kind to the parser that reconstitutes its
        # bytes into content factory objects.
        self._kind_factory = {
            'fulltext': fulltext_network_to_record,
            'groupcompress-block': groupcompress.network_block_to_records,
            'inventory-delta': inventory_delta_network_to_record,
            'knit-ft-gz': knit.knit_network_to_record,
            'knit-delta-gz': knit.knit_network_to_record,
            'knit-annotated-ft-gz': knit.knit_network_to_record,
            'knit-annotated-delta-gz': knit.knit_network_to_record,
            'knit-delta-closure': knit.knit_delta_closure_to_records,
            }

    def read(self):
        """Read the stream.

        Each serialised item is dispatched on its storage kind prefix; a
        single item may reconstitute to several records.

        :return: An iterator as per VersionedFiles.get_record_stream().
        """
        # The per-record mutter() debug trace (and its local import of
        # bzrlib.trace) has been removed: it logged every record on this
        # hot path and was leftover work-in-progress instrumentation.
        for bytes in self._bytes_iterator:
            storage_kind, line_end = network_bytes_to_kind_and_offset(bytes)
            for record in self._kind_factory[storage_kind](
                storage_kind, bytes, line_end):
                yield record
4022.1.6 by Robert Collins
Cherrypick and polish the RemoteSink for streaming push.
1610
1611
1612
def fulltext_network_to_record(kind, bytes, line_end):
    """Convert a network fulltext record to record."""
    # Layout after the kind line: 4-byte big-endian metadata length,
    # bencoded (key, parents) metadata, then the fulltext itself.
    meta_start = line_end + 4
    meta_len, = struct.unpack('!L', bytes[line_end:meta_start])
    record_meta = bytes[meta_start:meta_start + meta_len]
    key, parents = bencode.bdecode_as_tuple(record_meta)
    if parents == 'nil':
        parents = None
    fulltext = bytes[meta_start + meta_len:]
    return [FulltextContentFactory(key, parents, None, fulltext)]
4022.1.6 by Robert Collins
Cherrypick and polish the RemoteSink for streaming push.
1621
1622
4476.3.4 by Andrew Bennetts
Network serialisation, and most tests passing with InterDifferingSerializer commented out.
1623
def inventory_delta_network_to_record(kind, bytes, line_end):
    """Convert a network inventory-delta record to a record."""
    # Layout after the kind line: 4-byte big-endian metadata length,
    # bencoded (key, parents) metadata, then the serialised delta.
    meta_start = line_end + 4
    meta_len, = struct.unpack('!L', bytes[line_end:meta_start])
    record_meta = bytes[meta_start:meta_start + meta_len]
    key, parents = bencode.bdecode_as_tuple(record_meta)
    if parents == 'nil':
        parents = None
    delta_bytes = bytes[meta_start + meta_len:]
    deserialiser = inventory_delta.InventoryDeltaSerializer()
    basis_id, new_id, rich_root, tree_refs, delta = (
        deserialiser.parse_text_bytes(delta_bytes))
    return [InventoryDeltaContentFactory(
        key, parents, None, delta, basis_id, (rich_root, tree_refs))]
1636
1637
4022.1.6 by Robert Collins
Cherrypick and polish the RemoteSink for streaming push.
1638
def _length_prefix(bytes):
1639
    return struct.pack('!L', len(bytes))
1640
1641
4060.1.4 by Robert Collins
Streaming fetch from remote servers.
1642
def record_to_fulltext_bytes(record):
    """Serialise record as network fulltext bytes."""
    # Extract the text first, then build the bencoded (key, parents)
    # metadata, with None parents encoded as the 'nil' sentinel.
    record_content = record.get_bytes_as('fulltext')
    if record.parents is None:
        parents = 'nil'
    else:
        parents = record.parents
    record_meta = bencode.bencode((record.key, parents))
    return "fulltext\n%s%s%s" % (
        _length_prefix(record_meta), record_meta, record_content)
4111.1.1 by Robert Collins
Add a groupcompress sort order.
1651
1652
4476.3.4 by Andrew Bennetts
Network serialisation, and most tests passing with InterDifferingSerializer commented out.
1653
def record_to_inventory_delta_bytes(record):
    """Serialise record as a network inventory-delta record."""
    # Metadata is the bencoded (key, parents) pair, with None parents
    # encoded as the 'nil' sentinel.
    if record.parents is None:
        parents = 'nil'
    else:
        parents = record.parents
    record_meta = bencode.bencode((record.key, parents))
    record_content = record.get_bytes_as('inventory-delta-bytes')
    return "inventory-delta\n%s%s%s" % (
        _length_prefix(record_meta), record_meta, record_content)
1662
1663
4111.1.1 by Robert Collins
Add a groupcompress sort order.
1664
def sort_groupcompress(parent_map):
    """Sort and group the keys in parent_map into groupcompress order.

    groupcompress is defined (currently) as reverse-topological order, grouped
    by the key prefix.

    :return: A sorted-list of keys
    """
    # gc-optimal ordering is approximately reverse topological,
    # properly grouped by file-id.
    prefix_buckets = {}
    for item in parent_map.iteritems():
        key = item[0]
        # Plain string keys and 1-tuples have no file-id prefix.
        if isinstance(key, str) or len(key) == 1:
            prefix = ''
        else:
            prefix = key[0]
        prefix_buckets.setdefault(prefix, []).append(item)

    present_keys = []
    # Within each prefix group, emit keys in reverse topological order.
    for prefix in sorted(prefix_buckets):
        present_keys.extend(reversed(tsort.topo_sort(prefix_buckets[prefix])))
    return present_keys
    return present_keys