# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
#
# Authors:
#   Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Versioned text file storage api."""

from copy import copy
from cStringIO import StringIO
import os
import struct
from zlib import adler32

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
import urllib

from bzrlib import (
    annotate,
    errors,
    groupcompress,
    index,
    inventory,
    inventory_delta,
    knit,
    osutils,
    multiparent,
    tsort,
    revision,
    ui,
    )
from bzrlib.graph import DictParentsProvider, Graph, StackedParentsProvider
from bzrlib.transport.memory import MemoryTransport
""")
from bzrlib.inter import InterObject
from bzrlib.registry import Registry
from bzrlib.symbol_versioning import *
from bzrlib.textmerge import TextMerge
from bzrlib import bencode


adapter_registry = Registry()
adapter_registry.register_lazy(('knit-delta-gz', 'fulltext'), 'bzrlib.knit',
    'DeltaPlainToFullText')
adapter_registry.register_lazy(('knit-ft-gz', 'fulltext'), 'bzrlib.knit',
    'FTPlainToFullText')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'knit-delta-gz'),
    'bzrlib.knit', 'DeltaAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'fulltext'),
    'bzrlib.knit', 'DeltaAnnotatedToFullText')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'knit-ft-gz'),
    'bzrlib.knit', 'FTAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'fulltext'),
    'bzrlib.knit', 'FTAnnotatedToFullText')
# adapter_registry.register_lazy(('knit-annotated-ft-gz', 'chunked'),
#     'bzrlib.knit', 'FTAnnotatedToChunked')
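
# A minimal lookup sketch (illustrative only): adapters are registered by
# (source storage_kind, target representation).  The constructor and method
# signatures of the adapter classes live in bzrlib.knit, so the calls below
# are an assumption, not the documented API.
#
#   adapter_cls = adapter_registry.get(('knit-annotated-ft-gz', 'fulltext'))
#   adapter = adapter_cls(None)  # delta adapters may need a basis store
#   fulltext = adapter.get_bytes(record)  # record from get_record_stream()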


class ContentFactory(object):
    """Abstract interface for insertion and retrieval from a VersionedFile.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. One of
        'mpdiff', 'knit-annotated-ft', 'knit-annotated-delta', 'knit-ft',
        'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
        'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self):
        """Create a ContentFactory."""
        self.sha1 = None
        self.storage_kind = None
        self.key = None
        self.parents = None


class ChunkedContentFactory(ContentFactory):
    """Static data content factory.

    This takes a 'chunked' list of strings. The only requirement on 'chunked'
    is that ''.join(chunks) becomes a valid fulltext. A tuple of a single
    string satisfies this, as does a list of lines.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. Always
        'chunked'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self, key, parents, sha1, chunks):
        """Create a ContentFactory."""
        self.sha1 = sha1
        self.storage_kind = 'chunked'
        self.key = key
        self.parents = parents
        self._chunks = chunks

    def get_bytes_as(self, storage_kind):
        if storage_kind == 'chunked':
            return self._chunks
        elif storage_kind == 'fulltext':
            return ''.join(self._chunks)
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)
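
# Example (sketch, values illustrative): a ChunkedContentFactory hands back
# either the raw chunk list or the joined fulltext; any other representation
# raises errors.UnavailableRepresentation.
#
#   factory = ChunkedContentFactory(('rev-1',), None, None, ['a\n', 'b\n'])
#   factory.get_bytes_as('chunked')   # -> ['a\n', 'b\n']
#   factory.get_bytes_as('fulltext')  # -> 'a\nb\n'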


class FulltextContentFactory(ContentFactory):
    """Static data content factory.

    This takes a fulltext when created and just returns that during
    get_bytes_as('fulltext').

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. Always
        'fulltext'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self, key, parents, sha1, text):
        """Create a ContentFactory."""
        self.sha1 = sha1
        self.storage_kind = 'fulltext'
        self.key = key
        self.parents = parents
        self._text = text

    def get_bytes_as(self, storage_kind):
        if storage_kind == self.storage_kind:
            return self._text
        elif storage_kind == 'chunked':
            return [self._text]
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)


class InventoryDeltaContentFactory(ContentFactory):
    """A content factory that carries an inventory delta for serialisation."""

    def __init__(self, key, parents, sha1, delta, basis_id, format_flags,
            repo=None):
        self.sha1 = sha1
        self.storage_kind = 'inventory-delta'
        self.key = key
        self.parents = parents
        self._delta = delta
        self._basis_id = basis_id
        self._format_flags = format_flags
        self._repo = repo

    def get_bytes_as(self, storage_kind):
        if storage_kind == self.storage_kind:
            return self._basis_id, self.key, self._delta, self._format_flags
        elif storage_kind == 'inventory-delta-bytes':
            serializer = inventory_delta.InventoryDeltaSerializer()
            serializer.require_flags(*self._format_flags)
            return ''.join(serializer.delta_to_lines(
                self._basis_id, self.key, self._delta))
        elif storage_kind == 'inventory-delta-bytes-from-null':
            if self._repo is None:
                raise errors.UnavailableRepresentation(self.key, storage_kind,
                    self.storage_kind)
            null_inv = inventory.Inventory(None)
            my_inv = self._repo.get_inventory(self.key)
            delta = my_inv._make_delta(null_inv)
            # A serializer is needed in this branch too; without it the
            # require_flags call below would raise NameError.
            serializer = inventory_delta.InventoryDeltaSerializer()
            serializer.require_flags(*self._format_flags)
            return serializer.delta_to_lines(
                revision.NULL_REVISION, self.key, delta)
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)


class AbsentContentFactory(ContentFactory):
    """A placeholder content factory for unavailable texts.

    :ivar sha1: None.
    :ivar storage_kind: 'absent'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: None.
    """

    def __init__(self, key):
        """Create a ContentFactory."""
        self.sha1 = None
        self.storage_kind = 'absent'
        self.key = key
        self.parents = None


class AdapterFactory(ContentFactory):
    """A content factory to adapt between key prefixes."""

    def __init__(self, key, parents, adapted):
        """Create an adapter factory instance."""
        self.key = key
        self.parents = parents
        self._adapted = adapted

    def __getattr__(self, attr):
        """Return a member from the adapted object."""
        if attr in ('key', 'parents'):
            return self.__dict__[attr]
        else:
            return getattr(self._adapted, attr)


def filter_absent(record_stream):
    """Adapt a record stream to remove absent records."""
    for record in record_stream:
        if record.storage_kind != 'absent':
            yield record
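
# Illustrative sketch: filter_absent() is useful when copying records between
# stores and absent (ghost) records should simply be skipped rather than
# raised on.  The variable names below are placeholders, not part of this
# module.
#
#   stream = source_vf.get_record_stream(keys, 'unordered', False)
#   target_vf.insert_record_stream(filter_absent(stream))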


class VersionedFile(object):
    """Versioned text file storage.

    A versioned file manages versions of line-based text files,
    keeping track of the originating version for each line.

    To clients the "lines" of the file are represented as a list of
    strings. These strings will typically have terminal newline
    characters, but this is not required.  In particular files commonly
    do not have a newline at the end of the file.

    Texts are identified by a version-id string.
    """

    @staticmethod
    def check_not_reserved_id(version_id):
        revision.check_not_reserved_id(version_id)

    def copy_to(self, name, transport):
        """Copy this versioned file to name on transport."""
        raise NotImplementedError(self.copy_to)

    def get_record_stream(self, versions, ordering, include_delta_closure):
        """Get a stream of records for versions.

        :param versions: The versions to include. Each version is a tuple
            (version,).
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the data content of the
            stream, not in the emitted records). This guarantees that
            'fulltext' can be used successfully on every record.
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        raise NotImplementedError(self.get_record_stream)
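
    # Sketch of how a caller typically consumes the stream (identifiers are
    # illustrative): check for 'absent' placeholders before asking for bytes.
    #
    #   for record in vf.get_record_stream([('rev-1',)], 'unordered', True):
    #       if record.storage_kind == 'absent':
    #           raise errors.RevisionNotPresent(record.key, vf)
    #       text = record.get_bytes_as('fulltext')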

    def has_version(self, version_id):
        """Returns whether version is present."""
        raise NotImplementedError(self.has_version)

    def insert_record_stream(self, stream):
        """Insert a record stream into this versioned file.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        raise NotImplementedError

    def add_lines(self, version_id, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a single text on top of the versioned file.

        Must raise RevisionAlreadyPresent if the new version is
        already present in file history.

        Must raise RevisionNotPresent if any of the given parents are
        not present in file history.

        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminating \n. If the lines list does not meet this constraint the
            add routine may error or may succeed - but you will be unable to
            read the data back accurately. (Checking the lines have been split
            correctly is expensive and extremely unlikely to catch bugs so it
            is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations.  VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent.  The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
                 representation of the inserted version which can be provided
                 back to future add_lines calls in the parent_texts dictionary.
        """
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)
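
    # Minimal usage sketch (identifiers are illustrative): the third element of
    # the return value can be fed back through parent_texts on later calls to
    # enable delta optimisations.
    #
    #   sha1, num_bytes, parent_text = vf.add_lines('rev-2', ['rev-1'],
    #       ['one\n', 'two\n'])
    #   vf.add_lines('rev-3', ['rev-2'], ['one\n', 'two\n', 'three\n'],
    #       parent_texts={'rev-2': parent_text})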

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Helper to do the class specific add_lines."""
        raise NotImplementedError(self.add_lines)

    def add_lines_with_ghosts(self, version_id, parents, lines,
        parent_texts=None, nostore_sha=None, random_id=False,
        check_content=True, left_matching_blocks=None):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines and returns the same.
        """
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
            parent_texts, nostore_sha, random_id, check_content, left_matching_blocks)

    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts,
        nostore_sha, random_id, check_content, left_matching_blocks):
        """Helper to do class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)

    def check(self, progress_bar=None):
        """Check the versioned file for integrity."""
        raise NotImplementedError(self.check)

    def _check_lines_not_unicode(self, lines):
        """Check that lines being added to a versioned file are not unicode."""
        for line in lines:
            if line.__class__ is not str:
                raise errors.BzrBadParameterUnicode("lines")

    def _check_lines_are_lines(self, lines):
        """Check that the lines really are full lines without inline EOL."""
        for line in lines:
            if '\n' in line[:-1]:
                raise errors.BzrBadParameterContainsNewline("lines")

    def get_format_signature(self):
        """Get a text description of the data encoding in this file.

        :since: 0.90
        """
        raise NotImplementedError(self.get_format_signature)

    def make_mpdiffs(self, version_ids):
        """Create multiparent diffs for specified versions."""
        knit_versions = set()
        knit_versions.update(version_ids)
        parent_map = self.get_parent_map(version_ids)
        for version_id in version_ids:
            try:
                knit_versions.update(parent_map[version_id])
            except KeyError:
                raise errors.RevisionNotPresent(version_id, self)
        # We need to filter out ghosts, because we can't diff against them.
        knit_versions = set(self.get_parent_map(knit_versions).keys())
        lines = dict(zip(knit_versions,
            self._get_lf_split_line_list(knit_versions)))
        diffs = []
        for version_id in version_ids:
            target = lines[version_id]
            try:
                parents = [lines[p] for p in parent_map[version_id] if p in
                    knit_versions]
            except KeyError:
                # I don't know how this could ever trigger.
                # parent_map[version_id] was already triggered in the previous
                # for loop, and lines[p] has the 'if p in knit_versions' check,
                # so we again won't have a KeyError.
                raise errors.RevisionNotPresent(version_id, self)
            if len(parents) > 0:
                left_parent_blocks = self._extract_blocks(version_id,
                                                          parents[0], target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target, parents,
                         left_parent_blocks))
        return diffs

    def _extract_blocks(self, version_id, source, target):
        return None

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        # Does this need to call self._check_write_ok()? (IanC 20070919)
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        present_parents = set(self.get_parent_map(needed_parents).keys())
        for parent_id, lines in zip(present_parents,
                                 self._get_lf_split_line_list(present_parents)):
            mpvf.add_version(lines, parent_id, [])
        for (version, parent_ids, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_ids) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_ids[0]).num_lines()))
            else:
                left_matching_blocks = None
            try:
                _, _, version_text = self.add_lines_with_ghosts(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            except NotImplementedError:
                # The vf can't handle ghosts, so add lines normally, which will
                # (reasonably) fail if there are ghosts in the data.
                _, _, version_text = self.add_lines(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            vf_parents[version] = version_text
        sha1s = self.get_sha1s(versions)
        for version, parent_ids, expected_sha1, mpdiff in records:
            if expected_sha1 != sha1s[version]:
                raise errors.VersionedFileInvalidChecksum(version)
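
    # Sketch of the mpdiff round trip (names are illustrative): diffs made by
    # one versioned file can be installed into another via add_mpdiffs, each
    # record carrying (version, parents, expected_sha1, mpdiff).
    #
    #   mpdiffs = source_vf.make_mpdiffs(version_ids)
    #   parent_map = source_vf.get_parent_map(version_ids)
    #   sha1s = source_vf.get_sha1s(version_ids)
    #   records = [(v, parent_map[v], sha1s[v], d)
    #              for v, d in zip(version_ids, mpdiffs)]
    #   target_vf.add_mpdiffs(records)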

    def get_text(self, version_id):
        """Return version contents as a text string.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return ''.join(self.get_lines(version_id))
    get_string = get_text

    def get_texts(self, version_ids):
        """Return the texts of listed versions as a list of strings.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return [''.join(self.get_lines(v)) for v in version_ids]

    def get_lines(self, version_id):
        """Return version contents as a sequence of lines.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        raise NotImplementedError(self.get_lines)

    def _get_lf_split_line_list(self, version_ids):
        return [StringIO(t).readlines() for t in self.get_texts(version_ids)]

    def get_ancestry(self, version_ids, topo_sorted=True):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        This list will not be topologically sorted if topo_sorted=False is
        passed.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history."""
        if isinstance(version_ids, basestring):
            version_ids = [version_ids]
        raise NotImplementedError(self.get_ancestry)

    def get_ancestry_with_ghosts(self, version_ids):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history.

        Ghosts that are known about will be included in the ancestry list,
        but are not explicitly marked.
        """
        raise NotImplementedError(self.get_ancestry_with_ghosts)

    def get_parent_map(self, version_ids):
        """Get a map of the parents of version_ids.

        :param version_ids: The version ids to look up parents for.
        :return: A mapping from version id to parents.
        """
        raise NotImplementedError(self.get_parent_map)

    def get_parents_with_ghosts(self, version_id):
        """Return version names for parents of version_id.

        Will raise RevisionNotPresent if version_id is not present
        in the history.

        Ghosts that are known about will be included in the parent list,
        but are not explicitly marked.
        """
        try:
            return list(self.get_parent_map([version_id])[version_id])
        except KeyError:
            raise errors.RevisionNotPresent(version_id, self)

    def annotate(self, version_id):
        """Return a list of (version-id, line) tuples for version_id.

        :raise RevisionNotPresent: If the given version is
        not present in file history.
        """
        raise NotImplementedError(self.annotate)

    def iter_lines_added_or_present_in_versions(self, version_ids=None,
                                                pb=None):
        """Iterate over the lines in the versioned file from version_ids.

        This may return lines from other versions. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES: Lines are normalised: they will all have \n terminators.
               Lines are returned in arbitrary order.

        :return: An iterator over (line, version_id).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_versions)

    def plan_merge(self, ver_a, ver_b):
        """Return pseudo-annotation indicating how the two versions merge.

        This is computed between versions a and b and their common
        base.

        Weave lines present in none of them are skipped entirely.

        Legend:
        killed-base Dead in base revision
        killed-both Killed in each revision
        killed-a    Killed in a
        killed-b    Killed in b
        unchanged   Alive in both a and b (possibly created in both)
        new-a       Created in a
        new-b       Created in b
        ghost-a     Killed in a, unborn in b
        ghost-b     Killed in b, unborn in a
        irrelevant  Not in either revision
        """
        raise NotImplementedError(VersionedFile.plan_merge)

    def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
                    b_marker=TextMerge.B_MARKER):
        return PlanWeaveMerge(plan, a_marker, b_marker).merge_lines()[0]
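
    # Sketch (illustrative): weave_merge consumes the per-line (state, line)
    # plan that plan_merge produces, using the legend above, and returns the
    # merged lines with conflict markers where the two sides disagree.
    #
    #   plan = vf.plan_merge('rev-a', 'rev-b')
    #   merged_lines = vf.weave_merge(plan)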


class RecordingVersionedFilesDecorator(object):
    """A minimal versioned files that records calls made on it.

    Only enough methods have been added to support tests using it to date.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf):
        """Create a RecordingVersionedFilesDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        """
        self._backing_vf = backing_vf
        self.calls = []

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        self.calls.append(("add_lines", key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content))
        return self._backing_vf.add_lines(key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

    def check(self):
        self._backing_vf.check()

    def get_parent_map(self, keys):
        self.calls.append(("get_parent_map", copy(keys)))
        return self._backing_vf.get_parent_map(keys)

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        self.calls.append(("get_record_stream", list(keys), sort_order,
            include_delta_closure))
        return self._backing_vf.get_record_stream(keys, sort_order,
            include_delta_closure)

    def get_sha1s(self, keys):
        self.calls.append(("get_sha1s", copy(keys)))
        return self._backing_vf.get_sha1s(keys)

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        self.calls.append(("iter_lines_added_or_present_in_keys", copy(keys)))
        return self._backing_vf.iter_lines_added_or_present_in_keys(keys, pb=pb)

    def keys(self):
        self.calls.append(("keys",))
        return self._backing_vf.keys()


class OrderingVersionedFilesDecorator(RecordingVersionedFilesDecorator):
    """A VF that records calls, and returns keys in specific order.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf, key_priority):
        """Create a RecordingVersionedFilesDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        :param key_priority: A dictionary defining what order keys should be
            returned from an 'unordered' get_record_stream request.
            Keys with lower priority are returned first, keys not present in
            the map get an implicit priority of 0, and are returned in
            lexicographical order.
        """
        RecordingVersionedFilesDecorator.__init__(self, backing_vf)
        self._key_priority = key_priority

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        self.calls.append(("get_record_stream", list(keys), sort_order,
            include_delta_closure))
        if sort_order == 'unordered':
            def sort_key(key):
                return (self._key_priority.get(key, 0), key)
            # Use a defined order by asking for the keys one-by-one from the
            # backing_vf
            for key in sorted(keys, key=sort_key):
                for record in self._backing_vf.get_record_stream([key],
                                'unordered', include_delta_closure):
                    yield record
        else:
            for record in self._backing_vf.get_record_stream(keys, sort_order,
                            include_delta_closure):
                yield record
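
    # Usage sketch (illustrative): with key_priority={('rev-2',): 1}, an
    # 'unordered' request yields ('rev-1',) first (implicit priority 0,
    # lexicographic) and ('rev-2',) second.
    #
    #   vf = OrderingVersionedFilesDecorator(backing_vf, {('rev-2',): 1})
    #   records = list(vf.get_record_stream([('rev-2',), ('rev-1',)],
    #       'unordered', False))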


class KeyMapper(object):
    """KeyMappers map between keys and underlying partitioned storage."""

    def map(self, key):
        """Map key to an underlying storage identifier.

        :param key: A key tuple e.g. ('file-id', 'revision-id').
        :return: An underlying storage identifier, specific to the partitioning
            mechanism.
        """
        raise NotImplementedError(self.map)

    def unmap(self, partition_id):
        """Map a partitioned storage id back to a key prefix.

        :param partition_id: The underlying partition id.
        :return: As much of a key (or prefix) as is derivable from the partition
            id.
        """
        raise NotImplementedError(self.unmap)


class ConstantMapper(KeyMapper):
    """A key mapper that maps to a constant result."""

    def __init__(self, result):
        """Create a ConstantMapper which will return result for all maps."""
        self._result = result

    def map(self, key):
        """See KeyMapper.map()."""
        return self._result


class URLEscapeMapper(KeyMapper):
    """Base class for use with transport backed storage.

    This provides a map and unmap wrapper that respectively url escape and
    unescape their outputs and inputs.
    """

    def map(self, key):
        """See KeyMapper.map()."""
        return urllib.quote(self._map(key))

    def unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return self._unmap(urllib.unquote(partition_id))


class PrefixMapper(URLEscapeMapper):
    """A key mapper that extracts the first component of a key.

    This mapper is for use with a transport based backend.
    """

    def _map(self, key):
        """See KeyMapper.map()."""
        return key[0]

    def _unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return (partition_id,)


class HashPrefixMapper(URLEscapeMapper):
    """A key mapper that combines the first component of a key with a hash.

    This mapper is for use with a transport based backend.
    """

    def _map(self, key):
        """See KeyMapper.map()."""
        prefix = self._escape(key[0])
        return "%02x/%s" % (adler32(prefix) & 0xff, prefix)

    def _escape(self, prefix):
        """No escaping needed here."""
        return prefix

    def _unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return (self._unescape(osutils.basename(partition_id)),)

    def _unescape(self, basename):
        """No unescaping needed for HashPrefixMapper."""
        return basename


class HashEscapedPrefixMapper(HashPrefixMapper):
    """Combines the escaped first component of a key with a hash.

    This mapper is for use with a transport based backend.
    """

    _safe = "abcdefghijklmnopqrstuvwxyz0123456789-_@,."

    def _escape(self, prefix):
        """Turn a key element into a filesystem safe string.

        This is similar to a plain urllib.quote, except
        it uses specific safe characters, so that it doesn't
        have to translate a lot of valid file ids.
        """
        # @ does not get escaped. This is because it is a valid
        # filesystem character we use all the time, and it looks
        # a lot better than seeing %40 all the time.
        r = [((c in self._safe) and c or ('%%%02x' % ord(c)))
             for c in prefix]
        return ''.join(r)

    def _unescape(self, basename):
        """Escaped names are easily unescaped by urllib."""
        return urllib.unquote(basename)
797
798
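
# Illustrative sketch (not part of the original API): how the mappers defined
# above turn key tuples into transport-relative names and back again.  The
# key values below are invented purely for demonstration.
def _demo_key_mappers():
    key = ('file-id', 'revision-1')
    # ConstantMapper ignores the key and always returns the same name.
    constant_name = ConstantMapper('inventory').map(key)    # 'inventory'
    # PrefixMapper keeps only the first key element, url-escaped.
    prefix_mapper = PrefixMapper()
    name = prefix_mapper.map(key)                            # 'file-id'
    prefix = prefix_mapper.unmap(name)                       # ('file-id',)
    # HashPrefixMapper spreads prefixes over 256 adler32 buckets.
    hashed_name = HashPrefixMapper().map(key)                # e.g. '9e/file-id'
    # HashEscapedPrefixMapper additionally escapes characters outside _safe.
    escaped_name = HashEscapedPrefixMapper().map(('file id', 'revision-1'))
    return constant_name, name, prefix, hashed_name, escaped_name
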
799
def make_versioned_files_factory(versioned_file_factory, mapper):
800
    """Create a ThunkedVersionedFiles factory.
801
802
    This will create a callable which, when called, creates a
803
    ThunkedVersionedFiles on a transport, using mapper to access individual
804
    versioned files, and versioned_file_factory to create each individual file.
805
    """
806
    def factory(transport):
807
        return ThunkedVersionedFiles(transport, versioned_file_factory, mapper,
808
            lambda: True)
809
    return factory
810
811
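
# A minimal wiring sketch.  bzrlib.weave.WeaveFile is assumed here because it
# accepts the (path, transport, create=, get_scope=) call that
# ThunkedVersionedFiles._get_vf makes below; any versioned file class with a
# compatible constructor would do.
def _demo_make_factory(transport):
    from bzrlib import weave
    factory = make_versioned_files_factory(weave.WeaveFile, PrefixMapper())
    # The returned callable builds a ThunkedVersionedFiles over the transport.
    return factory(transport)
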
812
class VersionedFiles(object):
813
    """Storage for many versioned files.
814
815
    This object allows a single keyspace for accessing the history graph and
816
    contents of named bytestrings.
817
818
    Currently no implementation allows the graph of different key prefixes to
819
    intersect, but the API does allow such implementations in the future.
3350.6.7 by Robert Collins
Review feedback, making things more clear, adding documentation on what is used where.
820
821
    The keyspace is expressed via simple tuples. Any instance of VersionedFiles
822
    may have a different length key-size, but that size will be constant for
823
    all texts added to or retrieved from it. For instance, bzrlib uses
824
    instances with a key-size of 2 for storing user files in a repository, with
825
    the first element the fileid, and the second the version of that file.
826
827
    The use of tuples allows a single code base to support several different
828
    uses with only the mapping logic changing from instance to instance.
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
829
    """
830
831
    def add_lines(self, key, parents, lines, parent_texts=None,
832
        left_matching_blocks=None, nostore_sha=None, random_id=False,
833
        check_content=True):
834
        """Add a text to the store.
835
4241.4.1 by Ian Clatworthy
add sha generation support to versionedfiles
836
        :param key: The key tuple of the text to add. If the last element is
837
            None, a CHK string will be generated during the addition.
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
838
        :param parents: The parents key tuples of the text to add.
839
        :param lines: A list of lines. Each line must be a bytestring, and all
840
            of them except the last must be terminated with \n and contain no
841
            other \n's. The last line may either contain no \n's or a single
842
            terminating \n. If the lines list does not meet this constraint the add
843
            routine may error or may succeed - but you will be unable to read
844
            the data back accurately. (Checking the lines have been split
845
            correctly is expensive and extremely unlikely to catch bugs so it
846
            is not done at runtime unless check_content is True.)
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
847
        :param parent_texts: An optional dictionary containing the opaque
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
848
            representations of some or all of the parents of version_id to
849
            allow delta optimisations.  VERY IMPORTANT: the texts must be those
850
            returned by add_lines or data corruption can be caused.
851
        :param left_matching_blocks: a hint about which areas are common
852
            between the text and its left-hand-parent.  The format is
853
            the SequenceMatcher.get_matching_blocks format.
854
        :param nostore_sha: Raise ExistingContent and do not add the lines to
855
            the versioned file if the digest of the lines matches this.
856
        :param random_id: If True a random id has been selected rather than
857
            an id determined by some deterministic process such as a converter
858
            from a foreign VCS. When True the backend may choose not to check
859
            for uniqueness of the resulting key within the versioned file, so
860
            this should only be done when the result is expected to be unique
861
            anyway.
862
        :param check_content: If True, the lines supplied are verified to be
863
            bytestrings that are correctly formed lines.
864
        :return: The text sha1, the number of bytes in the text, and an opaque
865
                 representation of the inserted version which can be provided
866
                 back to future add_lines calls in the parent_texts dictionary.
867
        """
868
        raise NotImplementedError(self.add_lines)
869
4398.8.6 by John Arbash Meinel
Switch the api from VF.add_text to VF._add_text and trim some extra 'features'.
870
    def _add_text(self, key, parents, text, nostore_sha=None, random_id=False):
871
        """Add a text to the store.
872
873
        This is a private function for use by CommitBuilder.
874
875
        :param key: The key tuple of the text to add. If the last element is
876
            None, a CHK string will be generated during the addition.
877
        :param parents: The parents key tuples of the text to add.
878
        :param text: A string containing the text to be committed.
879
        :param nostore_sha: Raise ExistingContent and do not add the lines to
880
            the versioned file if the digest of the lines matches this.
881
        :param random_id: If True a random id has been selected rather than
882
            an id determined by some deterministic process such as a converter
883
            from a foreign VCS. When True the backend may choose not to check
884
            for uniqueness of the resulting key within the versioned file, so
885
            this should only be done when the result is expected to be unique
886
            anyway.
887
889
        :return: The text sha1, the number of bytes in the text, and an opaque
890
                 representation of the inserted version which can be provided
891
                 back to future _add_text calls in the parent_texts dictionary.
892
        """
893
        # The default implementation just thunks over to .add_lines(),
894
        # inefficient, but it works.
4398.8.1 by John Arbash Meinel
Add a VersionedFile.add_text() api.
895
        return self.add_lines(key, parents, osutils.split_lines(text),
896
                              nostore_sha=nostore_sha,
897
                              random_id=random_id,
4398.8.6 by John Arbash Meinel
Switch the api from VF.add_text to VF._add_text and trim some extra 'features'.
898
                              check_content=True)
4398.8.1 by John Arbash Meinel
Add a VersionedFile.add_text() api.
899
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
900
    def add_mpdiffs(self, records):
901
        """Add mpdiffs to this VersionedFile.
902
903
        Records should be iterables of version, parents, expected_sha1,
904
        mpdiff. mpdiff should be a MultiParent instance.
905
        """
906
        vf_parents = {}
907
        mpvf = multiparent.MultiMemoryVersionedFile()
908
        versions = []
909
        for version, parent_ids, expected_sha1, mpdiff in records:
910
            versions.append(version)
911
            mpvf.add_diff(mpdiff, version, parent_ids)
912
        needed_parents = set()
913
        for version, parent_ids, expected_sha1, mpdiff in records:
914
            needed_parents.update(p for p in parent_ids
915
                                  if not mpvf.has_version(p))
916
        # It seems likely that adding all the present parents as fulltexts can
917
        # easily exhaust memory.
3890.2.9 by John Arbash Meinel
Start using osutils.chunks_as_lines rather than osutils.split_lines.
918
        chunks_to_lines = osutils.chunks_to_lines
3350.8.11 by Robert Collins
Stacked add_mpdiffs.
919
        for record in self.get_record_stream(needed_parents, 'unordered',
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
920
            True):
3350.8.11 by Robert Collins
Stacked add_mpdiffs.
921
            if record.storage_kind == 'absent':
922
                continue
3890.2.9 by John Arbash Meinel
Start using osutils.chunks_as_lines rather than osutils.split_lines.
923
            mpvf.add_version(chunks_to_lines(record.get_bytes_as('chunked')),
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
924
                record.key, [])
925
        for (key, parent_keys, expected_sha1, mpdiff), lines in\
926
            zip(records, mpvf.get_line_list(versions)):
927
            if len(parent_keys) == 1:
928
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
929
                    mpvf.get_diff(parent_keys[0]).num_lines()))
930
            else:
931
                left_matching_blocks = None
932
            version_sha1, _, version_text = self.add_lines(key,
933
                parent_keys, lines, vf_parents,
934
                left_matching_blocks=left_matching_blocks)
935
            if version_sha1 != expected_sha1:
936
                raise errors.VersionedFileInvalidChecksum(version)
937
            vf_parents[key] = version_text
938
939
    def annotate(self, key):
940
        """Return a list of (version-key, line) tuples for the text of key.
941
942
        :raise RevisionNotPresent: If the key is not present.
943
        """
944
        raise NotImplementedError(self.annotate)
945
946
    def check(self, progress_bar=None):
947
        """Check this object for integrity."""
948
        raise NotImplementedError(self.check)
949
950
    @staticmethod
951
    def check_not_reserved_id(version_id):
952
        revision.check_not_reserved_id(version_id)
953
954
    def _check_lines_not_unicode(self, lines):
955
        """Check that lines being added to a versioned file are not unicode."""
956
        for line in lines:
957
            if line.__class__ is not str:
958
                raise errors.BzrBadParameterUnicode("lines")
959
960
    def _check_lines_are_lines(self, lines):
961
        """Check that the lines really are full lines without inline EOL."""
962
        for line in lines:
963
            if '\n' in line[:-1]:
964
                raise errors.BzrBadParameterContainsNewline("lines")
965
966
    def get_parent_map(self, keys):
967
        """Get a map of the parents of keys.
968
969
        :param keys: The keys to look up parents for.
970
        :return: A mapping from keys to parents. Absent keys are absent from
971
            the mapping.
972
        """
973
        raise NotImplementedError(self.get_parent_map)
974
975
    def get_record_stream(self, keys, ordering, include_delta_closure):
976
        """Get a stream of records for keys.
977
978
        :param keys: The keys to include.
979
        :param ordering: Either 'unordered' or 'topological'. A topologically
980
            sorted stream has compression parents strictly before their
981
            children.
982
        :param include_delta_closure: If True then the closure across any
983
            compression parents will be included (in the opaque data).
984
        :return: An iterator of ContentFactory objects, each of which is only
985
            valid until the iterator is advanced.
986
        """
987
        raise NotImplementedError(self.get_record_stream)
988
989
    def get_sha1s(self, keys):
990
        """Get the sha1's of the texts for the given keys.
991
992
        :param keys: The names of the keys to lookup
3350.8.3 by Robert Collins
VF.get_sha1s needed changing to be stackable.
993
        :return: a dict from key to sha1 digest. Keys of texts which are not
3350.8.14 by Robert Collins
Review feedback.
994
            present in the store are not present in the returned
3350.8.3 by Robert Collins
VF.get_sha1s needed changing to be stackable.
995
            dictionary.
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
996
        """
997
        raise NotImplementedError(self.get_sha1s)
998
3830.3.12 by Martin Pool
Review cleanups: unify has_key impls, add missing_keys(), clean up exception blocks
999
    has_key = index._has_key_from_parent_map
1000
4009.3.3 by Andrew Bennetts
Add docstrings.
1001
    def get_missing_compression_parent_keys(self):
1002
        """Return an iterable of keys of missing compression parents.
1003
1004
        Check this after calling insert_record_stream to find out if there are
1005
        any missing compression parents.  If there are, the records that
4009.3.12 by Robert Collins
Polish on inserting record streams with missing compression parents.
1006
        depend on them are not able to be inserted safely. The precise
1007
        behaviour depends on the concrete VersionedFiles class in use.
1008
1009
        Classes that do not support this will raise NotImplementedError.
4009.3.3 by Andrew Bennetts
Add docstrings.
1010
        """
1011
        raise NotImplementedError(self.get_missing_compression_parent_keys)
1012
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1013
    def insert_record_stream(self, stream):
1014
        """Insert a record stream into this container.
1015
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1016
        :param stream: A stream of records to insert.
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1017
        :return: None
1018
        :seealso: VersionedFiles.get_record_stream
1019
        """
1020
        raise NotImplementedError(self.insert_record_stream)
1021
1022
    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
1023
        """Iterate over the lines in the versioned files from keys.
1024
1025
        This may return lines from other keys. Each item yielded by the
1026
        returned iterator is a tuple of a line and a key of a text that the
1027
        line is present in (not necessarily the key that introduced it).
1028
1029
        Ordering of results is in whatever order is most suitable for the
1030
        underlying storage format.
1031
1032
        If a progress bar is supplied, it may be used to indicate progress.
1033
        The caller is responsible for cleaning up progress bars (because this
1034
        is an iterator).
1035
1036
        NOTES:
1037
         * Lines are normalised by the underlying store: they will all have \n
1038
           terminators.
1039
         * Lines are returned in arbitrary order.
1040
1041
        :return: An iterator over (line, key).
1042
        """
1043
        raise NotImplementedError(self.iter_lines_added_or_present_in_keys)
1044
1045
    def keys(self):
1046
        """Return a iterable of the keys for all the contained texts."""
1047
        raise NotImplementedError(self.keys)
1048
1049
    def make_mpdiffs(self, keys):
1050
        """Create multiparent diffs for specified keys."""
1051
        keys_order = tuple(keys)
1052
        keys = frozenset(keys)
1053
        knit_keys = set(keys)
1054
        parent_map = self.get_parent_map(keys)
1055
        for parent_keys in parent_map.itervalues():
1056
            if parent_keys:
1057
                knit_keys.update(parent_keys)
1058
        missing_keys = keys - set(parent_map)
1059
        if missing_keys:
3530.3.2 by Robert Collins
Handling frozen set inputs in mpdiff generation when a key is missing
1060
            raise errors.RevisionNotPresent(list(missing_keys)[0], self)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1061
        # We need to filter out ghosts, because we can't diff against them.
1062
        maybe_ghosts = knit_keys - keys
1063
        ghosts = maybe_ghosts - set(self.get_parent_map(maybe_ghosts))
1064
        knit_keys.difference_update(ghosts)
1065
        lines = {}
3890.2.9 by John Arbash Meinel
Start using osutils.chunks_as_lines rather than osutils.split_lines.
1066
        chunks_to_lines = osutils.chunks_to_lines
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1067
        for record in self.get_record_stream(knit_keys, 'topological', True):
3890.2.9 by John Arbash Meinel
Start using osutils.chunks_as_lines rather than osutils.split_lines.
1068
            lines[record.key] = chunks_to_lines(record.get_bytes_as('chunked'))
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1069
            # line_block_dict = {}
1070
            # for parent, blocks in record.extract_line_blocks():
1071
            #   line_blocks[parent] = blocks
1072
            # line_blocks[record.key] = line_block_dict
1073
        diffs = []
1074
        for key in keys_order:
1075
            target = lines[key]
1076
            parents = parent_map[key] or []
1077
            # Note that filtering knit_keys can lead to a parent difference
1078
            # between the creation and the application of the mpdiff.
1079
            parent_lines = [lines[p] for p in parents if p in knit_keys]
1080
            if len(parent_lines) > 0:
1081
                left_parent_blocks = self._extract_blocks(key, parent_lines[0],
1082
                    target)
1083
            else:
1084
                left_parent_blocks = None
1085
            diffs.append(multiparent.MultiParent.from_lines(target,
1086
                parent_lines, left_parent_blocks))
1087
        return diffs
1088
3830.3.12 by Martin Pool
Review cleanups: unify has_key impls, add missing_keys(), clean up exception blocks
1089
    missing_keys = index._missing_keys_from_parent_map
1090
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1091
    def _extract_blocks(self, version_id, source, target):
1092
        return None
1093
1094
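
# A usage sketch against any concrete VersionedFiles implementation ``vf``
# (for example a repository's ``texts`` attribute).  The key tuple and text
# below are invented for illustration.
def _demo_versioned_files_usage(vf):
    key = ('file-id', 'rev-1')
    # Store a text; the parents list is empty for the first version.
    sha1, num_bytes, _ = vf.add_lines(key, [], ['initial text\n'])
    # Graph access uses the same tuple keys.
    parent_map = vf.get_parent_map([key])     # {('file-id', 'rev-1'): ()}
    # Stream the stored text back out as a fulltext.
    text = None
    for record in vf.get_record_stream([key], 'unordered', True):
        if record.storage_kind != 'absent':
            text = record.get_bytes_as('fulltext')
    return sha1, num_bytes, parent_map, text
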
1095
class ThunkedVersionedFiles(VersionedFiles):
1096
    """Storage for many versioned files thunked onto a 'VersionedFile' class.
1097
1098
    This object allows a single keyspace for accessing the history graph and
1099
    contents of named bytestrings.
1100
1101
    Currently no implementation allows the graph of different key prefixes to
1102
    intersect, but the API does allow such implementations in the future.
1103
    """
1104
1105
    def __init__(self, transport, file_factory, mapper, is_locked):
1106
        """Create a ThunkedVersionedFiles."""
1107
        self._transport = transport
1108
        self._file_factory = file_factory
1109
        self._mapper = mapper
1110
        self._is_locked = is_locked
1111
1112
    def add_lines(self, key, parents, lines, parent_texts=None,
1113
        left_matching_blocks=None, nostore_sha=None, random_id=False,
1114
        check_content=True):
1115
        """See VersionedFiles.add_lines()."""
1116
        path = self._mapper.map(key)
1117
        version_id = key[-1]
1118
        parents = [parent[-1] for parent in parents]
1119
        vf = self._get_vf(path)
1120
        try:
1121
            try:
1122
                return vf.add_lines_with_ghosts(version_id, parents, lines,
1123
                    parent_texts=parent_texts,
1124
                    left_matching_blocks=left_matching_blocks,
1125
                    nostore_sha=nostore_sha, random_id=random_id,
1126
                    check_content=check_content)
1127
            except NotImplementedError:
1128
                return vf.add_lines(version_id, parents, lines,
1129
                    parent_texts=parent_texts,
1130
                    left_matching_blocks=left_matching_blocks,
1131
                    nostore_sha=nostore_sha, random_id=random_id,
1132
                    check_content=check_content)
1133
        except errors.NoSuchFile:
1134
            # parent directory may be missing, try again.
1135
            self._transport.mkdir(osutils.dirname(path))
1136
            try:
1137
                return vf.add_lines_with_ghosts(version_id, parents, lines,
1138
                    parent_texts=parent_texts,
1139
                    left_matching_blocks=left_matching_blocks,
1140
                    nostore_sha=nostore_sha, random_id=random_id,
1141
                    check_content=check_content)
1142
            except NotImplementedError:
1143
                return vf.add_lines(version_id, parents, lines,
1144
                    parent_texts=parent_texts,
1145
                    left_matching_blocks=left_matching_blocks,
1146
                    nostore_sha=nostore_sha, random_id=random_id,
1147
                    check_content=check_content)
1148
1149
    def annotate(self, key):
1150
        """Return a list of (version-key, line) tuples for the text of key.
1151
1152
        :raise RevisionNotPresent: If the key is not present.
1153
        """
1154
        prefix = key[:-1]
1155
        path = self._mapper.map(prefix)
1156
        vf = self._get_vf(path)
1157
        origins = vf.annotate(key[-1])
1158
        result = []
1159
        for origin, line in origins:
1160
            result.append((prefix + (origin,), line))
1161
        return result
1162
4454.3.65 by John Arbash Meinel
Tests that VF implementations support .get_annotator()
1163
    def get_annotator(self):
1164
        return annotate.Annotator(self)
1165
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1166
    def check(self, progress_bar=None):
1167
        """See VersionedFiles.check()."""
1168
        for prefix, vf in self._iter_all_components():
1169
            vf.check()
1170
1171
    def get_parent_map(self, keys):
1172
        """Get a map of the parents of keys.
1173
1174
        :param keys: The keys to look up parents for.
1175
        :return: A mapping from keys to parents. Absent keys are absent from
1176
            the mapping.
1177
        """
1178
        prefixes = self._partition_keys(keys)
1179
        result = {}
1180
        for prefix, suffixes in prefixes.items():
1181
            path = self._mapper.map(prefix)
1182
            vf = self._get_vf(path)
1183
            parent_map = vf.get_parent_map(suffixes)
1184
            for key, parents in parent_map.items():
1185
                result[prefix + (key,)] = tuple(
1186
                    prefix + (parent,) for parent in parents)
1187
        return result
1188
1189
    def _get_vf(self, path):
1190
        if not self._is_locked():
1191
            raise errors.ObjectNotLocked(self)
1192
        return self._file_factory(path, self._transport, create=True,
1193
            get_scope=lambda: None)
1194
1195
    def _partition_keys(self, keys):
1196
        """Turn keys into a dict of prefix:suffix_list."""
1197
        result = {}
1198
        for key in keys:
1199
            prefix_keys = result.setdefault(key[:-1], [])
1200
            prefix_keys.append(key[-1])
1201
        return result
1202
1203
    def _get_all_prefixes(self):
1204
        # Identify all key prefixes.
1205
        # XXX: A bit hacky, needs polish.
1206
        if type(self._mapper) == ConstantMapper:
1207
            paths = [self._mapper.map(())]
1208
            prefixes = [()]
1209
        else:
1210
            relpaths = set()
1211
            for quoted_relpath in self._transport.iter_files_recursive():
1212
                path, ext = os.path.splitext(quoted_relpath)
1213
                relpaths.add(path)
1214
            paths = list(relpaths)
1215
            prefixes = [self._mapper.unmap(path) for path in paths]
1216
        return zip(paths, prefixes)
1217
1218
    def get_record_stream(self, keys, ordering, include_delta_closure):
1219
        """See VersionedFiles.get_record_stream()."""
1220
        # Ordering will be taken care of by each partitioned store; group keys
1221
        # by partition.
1222
        keys = sorted(keys)
1223
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
1224
            suffixes = [(suffix,) for suffix in suffixes]
1225
            for record in vf.get_record_stream(suffixes, ordering,
1226
                include_delta_closure):
1227
                if record.parents is not None:
1228
                    record.parents = tuple(
1229
                        prefix + parent for parent in record.parents)
1230
                record.key = prefix + record.key
1231
                yield record
1232
1233
    def _iter_keys_vf(self, keys):
1234
        prefixes = self._partition_keys(keys)
1235
1236
        for prefix, suffixes in prefixes.items():
1237
            path = self._mapper.map(prefix)
1238
            vf = self._get_vf(path)
1239
            yield prefix, suffixes, vf
1240
1241
    def get_sha1s(self, keys):
1242
        """See VersionedFiles.get_sha1s()."""
1243
        sha1s = {}
1244
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
1245
            vf_sha1s = vf.get_sha1s(suffixes)
3350.8.3 by Robert Collins
VF.get_sha1s needed changing to be stackable.
1246
            for suffix, sha1 in vf_sha1s.iteritems():
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1247
                sha1s[prefix + (suffix,)] = sha1
3350.8.3 by Robert Collins
VF.get_sha1s needed changing to be stackable.
1248
        return sha1s
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1249
1250
    def insert_record_stream(self, stream):
1251
        """Insert a record stream into this container.
1252
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1253
        :param stream: A stream of records to insert.
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1254
        :return: None
1255
        :seealso: VersionedFiles.get_record_stream
1256
        """
1257
        for record in stream:
1258
            prefix = record.key[:-1]
1259
            key = record.key[-1:]
1260
            if record.parents is not None:
1261
                parents = [parent[-1:] for parent in record.parents]
1262
            else:
1263
                parents = None
1264
            thunk_record = AdapterFactory(key, parents, record)
1265
            path = self._mapper.map(prefix)
1266
            # Note that this parses the file many times; we can do better but
1267
            # as this only impacts weaves in terms of performance, it is
1268
            # tolerable.
1269
            vf = self._get_vf(path)
1270
            vf.insert_record_stream([thunk_record])
1271
1272
    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
1273
        """Iterate over the lines in the versioned files from keys.
1274
1275
        This may return lines from other keys. Each item the returned
1276
        iterator yields is a tuple of a line and a text version that that line
1277
        is present in (not introduced in).
1278
1279
        Ordering of results is in whatever order is most suitable for the
1280
        underlying storage format.
1281
1282
        If a progress bar is supplied, it may be used to indicate progress.
1283
        The caller is responsible for cleaning up progress bars (because this
1284
        is an iterator).
1285
1286
        NOTES:
1287
         * Lines are normalised by the underlying store: they will all have \n
1288
           terminators.
1289
         * Lines are returned in arbitrary order.
1290
1291
        :return: An iterator over (line, key).
1292
        """
1293
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
1294
            for line, version in vf.iter_lines_added_or_present_in_versions(suffixes):
1295
                yield line, prefix + (version,)
1296
1297
    def _iter_all_components(self):
1298
        for path, prefix in self._get_all_prefixes():
1299
            yield prefix, self._get_vf(path)
1300
1301
    def keys(self):
1302
        """See VersionedFiles.keys()."""
1303
        result = set()
1304
        for prefix, vf in self._iter_all_components():
1305
            for suffix in vf.versions():
1306
                result.add(prefix + (suffix,))
1307
        return result
1308
1309
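
# A construction sketch for the thunked store.  It assumes
# bzrlib.weave.WeaveFile as the per-prefix file class (its constructor accepts
# the (path, transport, create=, get_scope=) call made by _get_vf above) and a
# MemoryTransport for storage; the key and text are invented for illustration.
def _demo_thunked_versioned_files():
    from bzrlib import weave
    from bzrlib.transport.memory import MemoryTransport
    transport = MemoryTransport()
    vf = ThunkedVersionedFiles(transport, weave.WeaveFile, PrefixMapper(),
        lambda: True)
    # The key prefix ('file-id') selects one weave file on the transport; the
    # suffix ('rev-1') is the version id stored inside that file.
    vf.add_lines(('file-id', 'rev-1'), [], ['some text\n'])
    return sorted(vf.keys())     # [('file-id', 'rev-1')]
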
1310
class _PlanMergeVersionedFile(VersionedFiles):
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1311
    """A VersionedFile for uncommitted and committed texts.
1312
1313
    It is intended to allow merges to be planned with working tree texts.
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1314
    It implements only the small part of the VersionedFiles interface used by
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1315
    PlanMerge.  It falls back to multiple versionedfiles for data not stored in
1316
    _PlanMergeVersionedFile itself.
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1317
1318
    :ivar fallback_versionedfiles: a list of VersionedFiles objects that can be
1319
        queried for missing texts.
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1320
    """
1321
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1322
    def __init__(self, file_id):
1323
        """Create a _PlanMergeVersionedFile.
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1324
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1325
        :param file_id: Used with _PlanMerge code which is not yet fully
1326
            tuple-keyspace aware.
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1327
        """
1328
        self._file_id = file_id
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1329
        # fallback locations
1330
        self.fallback_versionedfiles = []
1331
        # Parents for locally held keys.
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1332
        self._parents = {}
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1333
        # line data for locally held keys.
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1334
        self._lines = {}
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1335
        # key lookup providers
1336
        self._providers = [DictParentsProvider(self._parents)]
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1337
3062.2.3 by Aaron Bentley
Sync up with bzr.dev API changes
1338
    def plan_merge(self, ver_a, ver_b, base=None):
3062.1.13 by Aaron Bentley
Make _PlanMerge an implementation detail of _PlanMergeVersionedFile
1339
        """See VersionedFile.plan_merge"""
3144.3.7 by Aaron Bentley
Update from review
1340
        from bzrlib.merge import _PlanMerge
3062.2.3 by Aaron Bentley
Sync up with bzr.dev API changes
1341
        if base is None:
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1342
            return _PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge()
1343
        old_plan = list(_PlanMerge(ver_a, base, self, (self._file_id,)).plan_merge())
1344
        new_plan = list(_PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge())
3062.2.3 by Aaron Bentley
Sync up with bzr.dev API changes
1345
        return _PlanMerge._subtract_plans(old_plan, new_plan)
1346
3144.3.1 by Aaron Bentley
Implement LCA merge, with problematic conflict markers
1347
    def plan_lca_merge(self, ver_a, ver_b, base=None):
3144.3.7 by Aaron Bentley
Update from review
1348
        from bzrlib.merge import _PlanLCAMerge
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1349
        graph = Graph(self)
1350
        new_plan = _PlanLCAMerge(ver_a, ver_b, self, (self._file_id,), graph).plan_merge()
3144.3.1 by Aaron Bentley
Implement LCA merge, with problematic conflict markers
1351
        if base is None:
1352
            return new_plan
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1353
        old_plan = _PlanLCAMerge(ver_a, base, self, (self._file_id,), graph).plan_merge()
3144.3.1 by Aaron Bentley
Implement LCA merge, with problematic conflict markers
1354
        return _PlanLCAMerge._subtract_plans(list(old_plan), list(new_plan))
3062.1.13 by Aaron Bentley
Make _PlanMerge an implementation detail of _PlanMergeVersionedFile
1355
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1356
    def add_lines(self, key, parents, lines):
1357
        """See VersionedFiles.add_lines
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1358
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1359
        Lines are added locally, not to fallback versionedfiles.  Also, ghosts
1360
        are permitted.  Only reserved ids are permitted.
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1361
        """
3350.6.8 by Martin Pool
Change stray pdb calls to exceptions
1362
        if type(key) is not tuple:
1363
            raise TypeError(key)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1364
        if not revision.is_reserved_id(key[-1]):
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1365
            raise ValueError('Only reserved ids may be used')
1366
        if parents is None:
1367
            raise ValueError('Parents may not be None')
1368
        if lines is None:
1369
            raise ValueError('Lines may not be None')
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1370
        self._parents[key] = tuple(parents)
1371
        self._lines[key] = lines
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1372
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1373
    def get_record_stream(self, keys, ordering, include_delta_closure):
1374
        pending = set(keys)
1375
        for key in keys:
1376
            if key in self._lines:
1377
                lines = self._lines[key]
1378
                parents = self._parents[key]
1379
                pending.remove(key)
3890.2.1 by John Arbash Meinel
Start working on a ChunkedContentFactory.
1380
                yield ChunkedContentFactory(key, parents, None, lines)
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1381
        for versionedfile in self.fallback_versionedfiles:
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1382
            for record in versionedfile.get_record_stream(
1383
                pending, 'unordered', True):
1384
                if record.storage_kind == 'absent':
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1385
                    continue
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1386
                else:
1387
                    pending.remove(record.key)
1388
                    yield record
3287.5.2 by Robert Collins
Deprecate VersionedFile.get_parents, breaking pulling from a ghost containing knit or pack repository to weaves, which improves correctness and allows simplification of core code.
1389
            if not pending:
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1390
                return
1391
        # report absent entries
1392
        for key in pending:
1393
            yield AbsentContentFactory(key)
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1394
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1395
    def get_parent_map(self, keys):
1396
        """See VersionedFiles.get_parent_map"""
1397
        # We create a new provider because a fallback may have been added.
1398
        # If we make fallbacks private we can update a stack list and avoid
1399
        # object creation thrashing.
3350.6.6 by Robert Collins
Fix test_plan_file_merge
1400
        keys = set(keys)
1401
        result = {}
1402
        if revision.NULL_REVISION in keys:
1403
            keys.remove(revision.NULL_REVISION)
1404
            result[revision.NULL_REVISION] = ()
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1405
        self._providers = self._providers[:1] + self.fallback_versionedfiles
3350.6.6 by Robert Collins
Fix test_plan_file_merge
1406
        result.update(
4379.3.3 by Gary van der Merwe
Rename and add doc string for StackedParentsProvider.
1407
            StackedParentsProvider(self._providers).get_parent_map(keys))
3350.6.5 by Robert Collins
Update to bzr.dev.
1408
        for key, parents in result.iteritems():
1409
            if parents == ():
1410
                result[key] = (revision.NULL_REVISION,)
3287.5.2 by Robert Collins
Deprecate VersionedFile.get_parents, breaking pulling from a ghost containing knit or pack repository to weaves, which improves correctness and allows simplification of core code.
1411
        return result
3144.3.1 by Aaron Bentley
Implement LCA merge, with problematic conflict markers
1412
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1413
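
# A sketch of how merge code typically feeds this class: working tree texts
# are added under reserved revision ids (ids ending in ':'), while committed
# texts come from a fallback store such as ``repository.texts``.  The ids and
# lines used here are invented for illustration.
def _demo_plan_merge_versioned_file(repository_texts):
    vf = _PlanMergeVersionedFile('file-id')
    # Committed texts are resolved through the fallback versionedfiles.
    vf.fallback_versionedfiles.append(repository_texts)
    # Uncommitted texts must use reserved ids and are held in memory only.
    vf.add_lines(('file-id', 'this:'), [], ['tree content\n'])
    vf.add_lines(('file-id', 'other:'), [], ['merged content\n'])
    return vf.get_parent_map([('file-id', 'this:'), ('file-id', 'other:')])
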
1551.6.10 by Aaron Bentley
Renamed WeaveMerge to PlanMerge, added plan method, created planless WeaveMerge
1414
class PlanWeaveMerge(TextMerge):
1551.6.13 by Aaron Bentley
Cleanup
1415
    """Weave merge that takes a plan as its input.
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1416
1551.6.14 by Aaron Bentley
Tweaks from merge review
1417
    This exists so that VersionedFile.plan_merge is implementable.
1418
    Most callers will want to use WeaveMerge instead.
1551.6.13 by Aaron Bentley
Cleanup
1419
    """
1420
1551.6.14 by Aaron Bentley
Tweaks from merge review
1421
    def __init__(self, plan, a_marker=TextMerge.A_MARKER,
1422
                 b_marker=TextMerge.B_MARKER):
1551.6.10 by Aaron Bentley
Renamed WeaveMerge to PlanMerge, added plan method, created planless WeaveMerge
1423
        TextMerge.__init__(self, a_marker, b_marker)
1424
        self.plan = plan
1425
1551.6.7 by Aaron Bentley
Implemented two-way merge, refactored weave merge
1426
    def _merge_struct(self):
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
1427
        lines_a = []
1428
        lines_b = []
1429
        ch_a = ch_b = False
1664.2.8 by Aaron Bentley
Fix WeaveMerge when plan doesn't end with unchanged lines
1430
1431
        def outstanding_struct():
1432
            if not lines_a and not lines_b:
1433
                return
1434
            elif ch_a and not ch_b:
1435
                # one-sided change:
1436
                yield (lines_a,)
1437
            elif ch_b and not ch_a:
1438
                yield (lines_b,)
1439
            elif lines_a == lines_b:
1440
                yield (lines_a,)
1441
            else:
1442
                yield (lines_a, lines_b)
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1443
1616.1.18 by Martin Pool
(weave-merge) don't treat killed-both lines as points of agreement;
1444
        # We previously considered either 'unchanged' or 'killed-both' lines
1445
        # to be possible places to resynchronize.  However, assuming agreement
1759.2.1 by Jelmer Vernooij
Fix some types (found using aspell).
1446
        # on killed-both lines may be too aggressive. -- mbp 20060324
1551.6.7 by Aaron Bentley
Implemented two-way merge, refactored weave merge
1447
        for state, line in self.plan:
1616.1.18 by Martin Pool
(weave-merge) don't treat killed-both lines as points of agreement;
1448
            if state == 'unchanged':
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
1449
                # resync and flush queued conflicting changes, if any
1664.2.8 by Aaron Bentley
Fix WeaveMerge when plan doesn't end with unchanged lines
1450
                for struct in outstanding_struct():
1451
                    yield struct
1551.6.11 by Aaron Bentley
Switched TextMerge_lines to work on a list
1452
                lines_a = []
1453
                lines_b = []
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
1454
                ch_a = ch_b = False
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1455
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
1456
            if state == 'unchanged':
1457
                if line:
1551.6.5 by Aaron Bentley
Got weave merge producing structural output
1458
                    yield ([line],)
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
1459
            elif state == 'killed-a':
1460
                ch_a = True
1461
                lines_b.append(line)
1462
            elif state == 'killed-b':
1463
                ch_b = True
1464
                lines_a.append(line)
1465
            elif state == 'new-a':
1466
                ch_a = True
1467
                lines_a.append(line)
1468
            elif state == 'new-b':
1469
                ch_b = True
1470
                lines_b.append(line)
3144.3.2 by Aaron Bentley
Get conflict handling working
1471
            elif state == 'conflicted-a':
1472
                ch_b = ch_a = True
1473
                lines_a.append(line)
1474
            elif state == 'conflicted-b':
1475
                ch_b = ch_a = True
1476
                lines_b.append(line)
4312.1.1 by John Arbash Meinel
Add a per-implementation test that deleting lines conflicts with modifying lines.
1477
            elif state == 'killed-both':
1478
                # This counts as a change, even though there is no associated
1479
                # line
1480
                ch_b = ch_a = True
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
1481
            else:
3376.2.4 by Martin Pool
Remove every assert statement from bzrlib!
1482
                if state not in ('irrelevant', 'ghost-a', 'ghost-b',
4312.1.1 by John Arbash Meinel
Add a per-implementation test that deleting lines conflicts with modifying lines.
1483
                        'killed-base'):
3376.2.4 by Martin Pool
Remove every assert statement from bzrlib!
1484
                    raise AssertionError(state)
1664.2.8 by Aaron Bentley
Fix WeaveMerge when plan doesn't end with unchanged lines
1485
        for struct in outstanding_struct():
1486
            yield struct
1563.2.12 by Robert Collins
Checkpointing: created InterObject to factor out common inter object worker code, added InterVersionedFile and tests to allow making join work between any versionedfile.
1487
1664.2.14 by Aaron Bentley
spacing fix
1488
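
# A sketch of driving PlanWeaveMerge with a hand-built plan.  The plan states
# used below are the ones handled by _merge_struct() above; the lines are
# invented for illustration.
def _demo_plan_weave_merge_struct():
    plan = [
        ('unchanged', 'common line\n'),
        ('new-a', 'line only in A\n'),
        ('new-b', 'line only in B\n'),
        ('unchanged', 'trailing line\n'),
    ]
    merger = PlanWeaveMerge(plan)
    # Unchanged regions come out as one-tuples and conflicted regions as
    # (lines_a, lines_b) pairs, which TextMerge renders with the A/B markers:
    #   [(['common line\n'],),
    #    (['line only in A\n'], ['line only in B\n']),
    #    (['trailing line\n'],)]
    return list(merger._merge_struct())
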
1551.6.10 by Aaron Bentley
Renamed WeaveMerge to PlanMerge, added plan method, created planless WeaveMerge
1489
class WeaveMerge(PlanWeaveMerge):
2831.7.1 by Ian Clatworthy
versionedfile.py code cleanups
1490
    """Weave merge that takes a VersionedFile and two versions as its input."""
1551.6.13 by Aaron Bentley
Cleanup
1491
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1492
    def __init__(self, versionedfile, ver_a, ver_b,
1551.6.14 by Aaron Bentley
Tweaks from merge review
1493
        a_marker=PlanWeaveMerge.A_MARKER, b_marker=PlanWeaveMerge.B_MARKER):
1551.6.15 by Aaron Bentley
Moved plan_merge into Weave
1494
        plan = versionedfile.plan_merge(ver_a, ver_b)
1551.6.10 by Aaron Bentley
Renamed WeaveMerge to PlanMerge, added plan method, created planless WeaveMerge
1495
        PlanWeaveMerge.__init__(self, plan, a_marker, b_marker)
1496
1497
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
1498
class VirtualVersionedFiles(VersionedFiles):
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1499
    """Dummy implementation for VersionedFiles that uses other functions for
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
1500
    obtaining fulltexts and parent maps.
1501
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1502
    This is always on the bottom of the stack and uses string keys
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
1503
    (rather than tuples) internally.
1504
    """
1505
1506
    def __init__(self, get_parent_map, get_lines):
1507
        """Create a VirtualVersionedFiles.
1508
1509
        :param get_parent_map: Same signature as Repository.get_parent_map.
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1510
        :param get_lines: Should return lines for specified key or None if
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
1511
                          not available.
1512
        """
1513
        super(VirtualVersionedFiles, self).__init__()
1514
        self._get_parent_map = get_parent_map
1515
        self._get_lines = get_lines
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1516
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
1517
    def check(self, progressbar=None):
1518
        """See VersionedFiles.check.
1519
1520
        :note: Always returns True for VirtualVersionedFiles.
1521
        """
1522
        return True
1523
1524
    def add_mpdiffs(self, records):
1525
        """See VersionedFiles.mpdiffs.
1526
1527
        :note: Not implemented for VirtualVersionedFiles.
1528
        """
1529
        raise NotImplementedError(self.add_mpdiffs)
1530
1531
    def get_parent_map(self, keys):
1532
        """See VersionedFiles.get_parent_map."""
3518.1.2 by Jelmer Vernooij
Fix some stylistic issues pointed out by Ian.
1533
        return dict([((k,), tuple([(p,) for p in v]))
1534
            for k, v in self._get_parent_map([k for (k,) in keys]).iteritems()])
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
1535
1536
    def get_sha1s(self, keys):
1537
        """See VersionedFiles.get_sha1s."""
1538
        ret = {}
1539
        for (k,) in keys:
1540
            lines = self._get_lines(k)
1541
            if lines is not None:
3518.1.2 by Jelmer Vernooij
Fix some stylistic issues pointed out by Ian.
1542
                if not isinstance(lines, list):
1543
                    raise AssertionError
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
1544
                ret[(k,)] = osutils.sha_strings(lines)
1545
        return ret
1546
1547
    def get_record_stream(self, keys, ordering, include_delta_closure):
1548
        """See VersionedFiles.get_record_stream."""
1549
        for (k,) in list(keys):
1550
            lines = self._get_lines(k)
1551
            if lines is not None:
3518.1.2 by Jelmer Vernooij
Fix some stylistic issues pointed out by Ian.
1552
                if not isinstance(lines, list):
1553
                    raise AssertionError
3890.2.1 by John Arbash Meinel
Start working on a ChunkedContentFactory.
1554
                yield ChunkedContentFactory((k,), None,
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
1555
                        sha1=osutils.sha_strings(lines),
3890.2.1 by John Arbash Meinel
Start working on a ChunkedContentFactory.
1556
                        chunks=lines)
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
1557
            else:
1558
                yield AbsentContentFactory((k,))
1559
3949.4.1 by Jelmer Vernooij
Implement VirtualVersionedFiles.iter_lines_added_or_present_in_keys.
1560
    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
1561
        """See VersionedFile.iter_lines_added_or_present_in_versions()."""
1562
        for i, (key,) in enumerate(keys):
1563
            if pb is not None:
4110.2.10 by Martin Pool
Tweak iter_lines progress messages
1564
                pb.update("Finding changed lines", i, len(keys))
3949.4.1 by Jelmer Vernooij
Implement VirtualVersionedFiles.iter_lines_added_or_present_in_keys.
1565
            for l in self._get_lines(key):
1566
                yield (l, key)
4005.3.2 by Robert Collins
First passing NetworkRecordStream test - a fulltext from any record type which isn't a chunked or fulltext can be serialised and deserialised successfully.
1567
1568
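
# A construction sketch: VirtualVersionedFiles only needs two callables, one
# mapping string keys to parent lists and one returning the lines of a text.
# The small in-memory history below is invented for illustration.
def _demo_virtual_versioned_files():
    parents = {'rev-1': [], 'rev-2': ['rev-1']}
    texts = {'rev-1': ['one\n'], 'rev-2': ['one\n', 'two\n']}
    vf = VirtualVersionedFiles(
        lambda keys: dict((k, parents[k]) for k in keys if k in parents),
        texts.get)
    # Keys are exposed as 1-tuples, matching the VersionedFiles keyspace.
    parent_map = vf.get_parent_map([('rev-2',)])   # {('rev-2',): (('rev-1',),)}
    sha1s = vf.get_sha1s([('rev-1',), ('rev-2',)])
    return parent_map, sha1s
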
1569
def network_bytes_to_kind_and_offset(network_bytes):
1570
    """Strip of a record kind from the front of network_bytes.
1571
1572
    :param network_bytes: The bytes of a record.
1573
    :return: A tuple (storage_kind, offset_of_remaining_bytes)
1574
    """
1575
    line_end = network_bytes.find('\n')
1576
    storage_kind = network_bytes[:line_end]
1577
    return storage_kind, line_end + 1
1578
1579
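
# A minimal sketch of splitting the storage kind off a serialised record; the
# payload below is invented, real payloads come from get_bytes_as().
def _demo_kind_and_offset():
    network_bytes = 'fulltext\n<serialised record body>'
    kind, offset = network_bytes_to_kind_and_offset(network_bytes)
    # kind == 'fulltext' and network_bytes[offset:] is the record body.
    return kind, network_bytes[offset:]
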
1580
class NetworkRecordStream(object):
1581
    """A record_stream which reconstitures a serialised stream."""
1582
1583
    def __init__(self, bytes_iterator):
1584
        """Create a NetworkRecordStream.
1585
1586
        :param bytes_iterator: An iterator of bytes. Each item in this
1587
            iterator should have been obtained from a record_streams'
1588
            record.get_bytes_as(record.storage_kind) call.
1589
        """
1590
        self._bytes_iterator = bytes_iterator
4476.3.4 by Andrew Bennetts
Network serialisation, and most tests passing with InterDifferingSerializer commented out.
1591
        self._kind_factory = {
1592
            'fulltext': fulltext_network_to_record,
1593
            'groupcompress-block': groupcompress.network_block_to_records,
1594
            'inventory-delta': inventory_delta_network_to_record,
1595
            'knit-ft-gz': knit.knit_network_to_record,
1596
            'knit-delta-gz': knit.knit_network_to_record,
1597
            'knit-annotated-ft-gz': knit.knit_network_to_record,
1598
            'knit-annotated-delta-gz': knit.knit_network_to_record,
1599
            'knit-delta-closure': knit.knit_delta_closure_to_records,
4005.3.2 by Robert Collins
First passing NetworkRecordStream test - a fulltext from any record type which isn't a chunked or fulltext can be serialised and deserialised successfully.
1600
            }
1601
1602
    def read(self):
1603
        """Read the stream.
1604
1605
        :return: An iterator as per VersionedFiles.get_record_stream().
1606
        """
1607
        for bytes in self._bytes_iterator:
1608
            storage_kind, line_end = network_bytes_to_kind_and_offset(bytes)
4005.3.6 by Robert Collins
Support delta_closure=True with NetworkRecordStream to transmit deltas over the wire when full text extraction is required on the far end.
1609
            for record in self._kind_factory[storage_kind](
1610
                storage_kind, bytes, line_end):
1611
                yield record
4022.1.6 by Robert Collins
Cherrypick and polish the RemoteSink for streaming push.
1612
1613
1614
def fulltext_network_to_record(kind, bytes, line_end):
1615
    """Convert a network fulltext record to record."""
1616
    meta_len, = struct.unpack('!L', bytes[line_end:line_end+4])
4060.1.4 by Robert Collins
Streaming fetch from remote servers.
1617
    record_meta = bytes[line_end+4:line_end+4+meta_len]
4022.1.6 by Robert Collins
Cherrypick and polish the RemoteSink for streaming push.
1618
    key, parents = bencode.bdecode_as_tuple(record_meta)
1619
    if parents == 'nil':
1620
        parents = None
4060.1.4 by Robert Collins
Streaming fetch from remote servers.
1621
    fulltext = bytes[line_end+4+meta_len:]
1622
    return [FulltextContentFactory(key, parents, None, fulltext)]
4022.1.6 by Robert Collins
Cherrypick and polish the RemoteSink for streaming push.
1623
1624
4476.3.4 by Andrew Bennetts
Network serialisation, and most tests passing with InterDifferingSerializer commented out.
1625
def inventory_delta_network_to_record(kind, bytes, line_end):
4476.3.24 by Andrew Bennetts
Finish updating verb name from _1.17 to _1.18. Also, fix a typo in a comment.
1626
    """Convert a network inventory-delta record to record."""
4476.3.4 by Andrew Bennetts
Network serialisation, and most tests passing with InterDifferingSerializer commented out.
1627
    meta_len, = struct.unpack('!L', bytes[line_end:line_end+4])
1628
    record_meta = bytes[line_end+4:line_end+4+meta_len]
1629
    key, parents = bencode.bdecode_as_tuple(record_meta)
1630
    if parents == 'nil':
1631
        parents = None
1632
    inventory_delta_bytes = bytes[line_end+4+meta_len:]
4476.3.15 by Andrew Bennetts
Partially working fallback for pre-1.17 servers.
1633
    deserialiser = inventory_delta.InventoryDeltaSerializer()
4476.3.4 by Andrew Bennetts
Network serialisation, and most tests passing with InterDifferingSerializer commented out.
1634
    parse_result = deserialiser.parse_text_bytes(inventory_delta_bytes)
4476.3.15 by Andrew Bennetts
Partially working fallback for pre-1.17 servers.
1635
    basis_id, new_id, rich_root, tree_refs, delta = parse_result
4476.3.4 by Andrew Bennetts
Network serialisation, and most tests passing with InterDifferingSerializer commented out.
1636
    return [InventoryDeltaContentFactory(
1637
        key, parents, None, delta, basis_id, (rich_root, tree_refs))]
1638
1639
4022.1.6 by Robert Collins
Cherrypick and polish the RemoteSink for streaming push.
1640
def _length_prefix(bytes):
1641
    return struct.pack('!L', len(bytes))
1642
1643
4060.1.4 by Robert Collins
Streaming fetch from remote servers.
1644
def record_to_fulltext_bytes(record):
4022.1.6 by Robert Collins
Cherrypick and polish the RemoteSink for streaming push.
1645
    if record.parents is None:
1646
        parents = 'nil'
1647
    else:
1648
        parents = record.parents
1649
    record_meta = bencode.bencode((record.key, parents))
1650
    record_content = record.get_bytes_as('fulltext')
1651
    return "fulltext\n%s%s%s" % (
1652
        _length_prefix(record_meta), record_meta, record_content)
4111.1.1 by Robert Collins
Add a groupcompress sort order.
1653
1654
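
# A round-trip sketch for the fulltext wire format defined above: serialise a
# record with record_to_fulltext_bytes(), then parse it back.  The key and
# text are invented for illustration.
def _demo_fulltext_round_trip():
    record = FulltextContentFactory(('file-id', 'rev-1'), None, None, 'hello\n')
    wire_bytes = record_to_fulltext_bytes(record)
    kind, offset = network_bytes_to_kind_and_offset(wire_bytes)
    # The parser returns a list containing a single content factory.
    parsed, = fulltext_network_to_record(kind, wire_bytes, offset)
    return parsed.key, parsed.parents, parsed.get_bytes_as('fulltext')
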
4476.3.4 by Andrew Bennetts
Network serialisation, and most tests passing with InterDifferingSerializer commented out.
1655
def record_to_inventory_delta_bytes(record):
1656
    record_content = record.get_bytes_as('inventory-delta-bytes')
4476.3.15 by Andrew Bennetts
Partially working fallback for pre-1.17 servers.
1657
    if record.parents is None:
1658
        parents = 'nil'
1659
    else:
1660
        parents = record.parents
4476.3.4 by Andrew Bennetts
Network serialisation, and most tests passing with InterDifferingSerializer commented out.
1661
    record_meta = bencode.bencode((record.key, parents))
1662
    return "inventory-delta\n%s%s%s" % (
1663
        _length_prefix(record_meta), record_meta, record_content)
1664
1665
4111.1.1 by Robert Collins
Add a groupcompress sort order.
1666
def sort_groupcompress(parent_map):
1667
    """Sort and group the keys in parent_map into groupcompress order.
1668
1669
    groupcompress is defined (currently) as reverse-topological order, grouped
1670
    by the key prefix.
1671
1672
    :return: A sorted-list of keys
1673
    """
1674
    # gc-optimal ordering is approximately reverse topological,
1675
    # properly grouped by file-id.
1676
    per_prefix_map = {}
1677
    for item in parent_map.iteritems():
1678
        key = item[0]
1679
        if isinstance(key, str) or len(key) == 1:
1680
            prefix = ''
1681
        else:
1682
            prefix = key[0]
1683
        try:
1684
            per_prefix_map[prefix].append(item)
1685
        except KeyError:
1686
            per_prefix_map[prefix] = [item]
1687
1688
    present_keys = []
1689
    for prefix in sorted(per_prefix_map):
1690
        present_keys.extend(reversed(tsort.topo_sort(per_prefix_map[prefix])))
1691
    return present_keys
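
# A small sketch of the resulting order: keys are grouped by prefix (file id)
# and each group is emitted in reverse topological order.  The tiny history
# below is invented for illustration.
def _demo_sort_groupcompress():
    parent_map = {
        ('file-a', 'rev-1'): (),
        ('file-a', 'rev-2'): (('file-a', 'rev-1'),),
        ('file-b', 'rev-1'): (),
    }
    # All 'file-a' keys come out together, newest first, then 'file-b'.
    return sort_groupcompress(parent_map)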