/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
3830.3.20 by John Arbash Meinel
Minor PEP8 and copyright updates.
1
# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
2
#
3
# Authors:
4
#   Johan Rydberg <jrydberg@gnu.org>
5
#
6
# This program is free software; you can redistribute it and/or modify
7
# it under the terms of the GNU General Public License as published by
8
# the Free Software Foundation; either version 2 of the License, or
9
# (at your option) any later version.
1887.1.1 by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines,
10
#
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
11
# This program is distributed in the hope that it will be useful,
12
# but WITHOUT ANY WARRANTY; without even the implied warranty of
13
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
# GNU General Public License for more details.
1887.1.1 by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines,
15
#
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
16
# You should have received a copy of the GNU General Public License
17
# along with this program; if not, write to the Free Software
4183.7.1 by Sabin Iacob
update FSF mailing address
18
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
19
20
"""Versioned text file storage api."""
21
3350.8.2 by Robert Collins
stacked get_parent_map.
22
from copy import copy
3350.6.1 by Robert Collins
* New ``versionedfile.KeyMapper`` interface to abstract out the access to
23
from cStringIO import StringIO
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
24
import os
4022.1.6 by Robert Collins
Cherrypick and polish the RemoteSink for streaming push.
25
import struct
3350.6.1 by Robert Collins
* New ``versionedfile.KeyMapper`` interface to abstract out the access to
26
from zlib import adler32
27
1996.3.7 by John Arbash Meinel
lazy import versionedfile, late-load bzrlib.merge
28
from bzrlib.lazy_import import lazy_import
29
lazy_import(globals(), """
3224.5.20 by Andrew Bennetts
Remove or lazyify a couple more imports.
30
import urllib
1996.3.7 by John Arbash Meinel
lazy import versionedfile, late-load bzrlib.merge
31
32
from bzrlib import (
33
    errors,
3735.32.18 by John Arbash Meinel
We now support generating a network stream.
34
    groupcompress,
3830.3.12 by Martin Pool
Review cleanups: unify has_key impls, add missing_keys(), clean up exception blocks
35
    index,
4476.3.1 by Andrew Bennetts
Initial hacking to use inventory deltas for cross-format fetch.
36
    inventory_delta,
4005.3.2 by Robert Collins
First passing NetworkRecordStream test - a fulltext from any record type which isn't a chunked or fulltext can be serialised and deserialised successfully.
37
    knit,
2249.5.12 by John Arbash Meinel
Change the APIs for VersionedFile, Store, and some of Repository into utf-8
38
    osutils,
2520.4.3 by Aaron Bentley
Implement plain strategy for extracting and installing multiparent diffs
39
    multiparent,
1996.3.7 by John Arbash Meinel
lazy import versionedfile, late-load bzrlib.merge
40
    tsort,
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
41
    revision,
1996.3.7 by John Arbash Meinel
lazy import versionedfile, late-load bzrlib.merge
42
    ui,
43
    )
4379.3.3 by Gary van der Merwe
Rename and add doc string for StackedParentsProvider.
44
from bzrlib.graph import DictParentsProvider, Graph, StackedParentsProvider
1996.3.7 by John Arbash Meinel
lazy import versionedfile, late-load bzrlib.merge
45
from bzrlib.transport.memory import MemoryTransport
46
""")
1563.2.12 by Robert Collins
Checkpointing: created InterObject to factor out common inter object worker code, added InterVersionedFile and tests to allow making join work between any versionedfile.
47
from bzrlib.inter import InterObject
3350.3.7 by Robert Collins
Create a registry of versioned file record adapters.
48
from bzrlib.registry import Registry
3287.5.2 by Robert Collins
Deprecate VersionedFile.get_parents, breaking pulling from a ghost containing knit or pack repository to weaves, which improves correctness and allows simplification of core code.
49
from bzrlib.symbol_versioning import *
1551.6.7 by Aaron Bentley
Implemented two-way merge, refactored weave merge
50
from bzrlib.textmerge import TextMerge
2694.5.4 by Jelmer Vernooij
Move bzrlib.util.bencode to bzrlib._bencode_py.
51
from bzrlib import bencode
1563.2.11 by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.
52
53
3350.3.7 by Robert Collins
Create a registry of versioned file record adapters.
54
# Registry of adapters that convert between record storage kinds.  Keys are
# (source storage kind, target storage kind) pairs; values are loaded lazily
# from bzrlib.knit.
adapter_registry = Registry()
adapter_registry.register_lazy(
    ('knit-delta-gz', 'fulltext'), 'bzrlib.knit', 'DeltaPlainToFullText')
adapter_registry.register_lazy(
    ('knit-ft-gz', 'fulltext'), 'bzrlib.knit', 'FTPlainToFullText')
adapter_registry.register_lazy(
    ('knit-annotated-delta-gz', 'knit-delta-gz'),
    'bzrlib.knit', 'DeltaAnnotatedToUnannotated')
adapter_registry.register_lazy(
    ('knit-annotated-delta-gz', 'fulltext'),
    'bzrlib.knit', 'DeltaAnnotatedToFullText')
adapter_registry.register_lazy(
    ('knit-annotated-ft-gz', 'knit-ft-gz'),
    'bzrlib.knit', 'FTAnnotatedToUnannotated')
adapter_registry.register_lazy(
    ('knit-annotated-ft-gz', 'fulltext'),
    'bzrlib.knit', 'FTAnnotatedToFullText')
# adapter_registry.register_lazy(('knit-annotated-ft-gz', 'chunked'),
#     'bzrlib.knit', 'FTAnnotatedToChunked')
3350.3.7 by Robert Collins
Create a registry of versioned file record adapters.
69
70
3350.3.3 by Robert Collins
Functional get_record_stream interface tests covering full interface.
71
class ContentFactory(object):
    """Abstract interface for insertion and retrieval from a VersionedFile.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. One of
        'mpdiff', 'knit-annotated-ft', 'knit-annotated-delta', 'knit-ft',
        'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
        'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self):
        """Create a ContentFactory with every attribute unset."""
        self.key = None
        self.parents = None
        self.sha1 = None
        self.storage_kind = None
92
93
3890.2.1 by John Arbash Meinel
Start working on a ChunkedContentFactory.
94
class ChunkedContentFactory(ContentFactory):
    """Static data content factory backed by a list of byte chunks.

    The only requirement on the 'chunked' payload is that ''.join(chunks)
    yields a valid fulltext. A tuple holding a single string satisfies
    this, as does a list of lines.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. Always
        'chunked'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self, key, parents, sha1, chunks):
        """Create a ChunkedContentFactory holding chunks for key."""
        self.key = key
        self.parents = parents
        self.sha1 = sha1
        self.storage_kind = 'chunked'
        self._chunks = chunks

    def get_bytes_as(self, storage_kind):
        """Return the content as storage_kind.

        :raises errors.UnavailableRepresentation: If storage_kind is
            neither 'chunked' nor 'fulltext'.
        """
        if storage_kind == 'fulltext':
            return ''.join(self._chunks)
        if storage_kind == 'chunked':
            return self._chunks
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)
126
127
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
128
class FulltextContentFactory(ContentFactory):
    """Static data content factory holding a complete text.

    The fulltext supplied at construction time is handed back verbatim by
    get_bytes_as('fulltext').

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. Always
        'fulltext'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self, key, parents, sha1, text):
        """Create a FulltextContentFactory holding text for key."""
        self.key = key
        self.parents = parents
        self.sha1 = sha1
        self.storage_kind = 'fulltext'
        self._text = text

    def get_bytes_as(self, storage_kind):
        """Return the content as storage_kind.

        :raises errors.UnavailableRepresentation: If storage_kind is
            neither 'fulltext' nor 'chunked'.
        """
        if storage_kind == self.storage_kind:
            return self._text
        if storage_kind == 'chunked':
            # A list (not a tuple) so that callers may assign to elements.
            return [self._text]
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)
159
160
4476.3.1 by Andrew Bennetts
Initial hacking to use inventory deltas for cross-format fetch.
161
class InventoryDeltaContentFactory(ContentFactory):
    """Content factory wrapping an inventory delta for streaming.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: Always 'inventory-delta'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key, or None if no
        parent information is available.
    """

    def __init__(self, key, parents, sha1, delta, basis_id, format_flags):
        """Create an InventoryDeltaContentFactory.

        :param delta: The inventory delta against basis_id.
        :param basis_id: The id of the inventory the delta applies to.
        :param format_flags: Flags passed through to
            InventoryDeltaSerializer when serialising the delta.
        """
        self.sha1 = sha1
        self.storage_kind = 'inventory-delta'
        self.key = key
        self.parents = parents
        self._delta = delta
        self._basis_id = basis_id
        self._format_flags = format_flags

    def get_bytes_as(self, storage_kind):
        """Return the delta in the requested representation.

        :raises errors.UnavailableRepresentation: If storage_kind is
            neither 'inventory-delta' nor 'inventory-delta-bytes'.
        """
        if storage_kind == self.storage_kind:
            return self._basis_id, self.key, self._delta, self._format_flags
        elif storage_kind == 'inventory-delta-bytes':
            serializer = inventory_delta.InventoryDeltaSerializer(
                *self._format_flags)
            # Bug fix: this previously read self.delta, which is never set
            # (the attribute is stored as self._delta in __init__), so this
            # branch always raised AttributeError.
            return serializer.delta_to_lines(
                self._basis_id, self.key, self._delta)
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)
182
183
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
184
class AbsentContentFactory(ContentFactory):
    """A placeholder content factory for unavailable texts.

    :ivar sha1: None.
    :ivar storage_kind: 'absent'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: None.
    """

    def __init__(self, key):
        """Create an AbsentContentFactory for key."""
        self.key = key
        self.parents = None
        self.sha1 = None
        self.storage_kind = 'absent'
200
201
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
202
class AdapterFactory(ContentFactory):
    """A content factory to adapt between key prefix's.

    All attribute access other than 'key' and 'parents' is proxied to the
    adapted factory.
    """

    def __init__(self, key, parents, adapted):
        """Create an adapter factory instance."""
        self.key = key
        self.parents = parents
        self._adapted = adapted

    def __getattr__(self, attr):
        """Return a member from the adapted object."""
        if attr not in ('key', 'parents'):
            return getattr(self._adapted, attr)
        return self.__dict__[attr]
217
218
3350.3.14 by Robert Collins
Deprecate VersionedFile.join.
219
def filter_absent(record_stream):
    """Adapt a record stream to remove absent records."""
    return (record for record in record_stream
            if record.storage_kind != 'absent')
224
225
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
226
class VersionedFile(object):
    """Versioned text file storage.

    A versioned file manages versions of line-based text files, keeping
    track of the originating version for each line.

    To clients the "lines" of the file are represented as a list of
    strings. These strings will typically have terminal newline
    characters, but this is not required.  In particular files commonly
    do not have a newline at the end of the file.

    Texts are identified by a version-id string.
    """
239
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
240
    @staticmethod
    def check_not_reserved_id(version_id):
        """Raise ReservedId if version_id is a reserved revision id."""
        revision.check_not_reserved_id(version_id)
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
243
1563.2.15 by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages.
244
    def copy_to(self, name, transport):
245
        """Copy this versioned file to name on transport."""
246
        raise NotImplementedError(self.copy_to)
1863.1.1 by John Arbash Meinel
Allow Versioned files to do caching if explicitly asked, and implement for Knit
247
3350.3.3 by Robert Collins
Functional get_record_stream interface tests covering full interface.
248
    def get_record_stream(self, versions, ordering, include_delta_closure):
249
        """Get a stream of records for versions.
250
251
        :param versions: The versions to include. Each version is a tuple
252
            (version,).
253
        :param ordering: Either 'unordered' or 'topological'. A topologically
254
            sorted stream has compression parents strictly before their
255
            children.
256
        :param include_delta_closure: If True then the closure across any
3350.3.22 by Robert Collins
Review feedback.
257
            compression parents will be included (in the data content of the
258
            stream, not in the emitted records). This guarantees that
259
            'fulltext' can be used successfully on every record.
3350.3.3 by Robert Collins
Functional get_record_stream interface tests covering full interface.
260
        :return: An iterator of ContentFactory objects, each of which is only
261
            valid until the iterator is advanced.
262
        """
263
        raise NotImplementedError(self.get_record_stream)
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
264
265
    def has_version(self, version_id):
266
        """Returns whether version is present."""
267
        raise NotImplementedError(self.has_version)
268
3350.3.8 by Robert Collins
Basic stream insertion, no fast path yet for knit to knit.
269
    def insert_record_stream(self, stream):
270
        """Insert a record stream into this versioned file.
271
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
272
        :param stream: A stream of records to insert.
3350.3.8 by Robert Collins
Basic stream insertion, no fast path yet for knit to knit.
273
        :return: None
274
        :seealso VersionedFile.get_record_stream:
275
        """
276
        raise NotImplementedError
277
2520.4.140 by Aaron Bentley
Use matching blocks from mpdiff for knit delta creation
278
    def add_lines(self, version_id, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a single text on top of the versioned file.

        Must raise RevisionAlreadyPresent if the new version is already
        present in file history.

        Must raise RevisionNotPresent if any of the given parents are not
        present in file history.

        :param lines: A list of lines. Each line must be a bytestring. All
            lines except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminating \n. If the lines list does not meet this constraint
            the add routine may error or may succeed - but you will be
            unable to read the data back accurately. (Checking the lines
            have been split correctly is expensive and extremely unlikely
            to catch bugs so it is not done at runtime unless check_content
            is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations.  VERY IMPORTANT: the texts must be
            those returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent.  The format is the
            SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines
            to the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a
            converter from a foreign VCS. When True the backend may choose
            not to check for uniqueness of the resulting key within the
            versioned file, so this should only be done when the result is
            expected to be unique anyway.
        :param check_content: If True, the lines supplied are verified to
            be bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an
            opaque representation of the inserted version which can be
            provided back to future add_lines calls in the parent_texts
            dictionary.
        """
        # Guard against writes on a finished/readonly versioned file, then
        # delegate the real work to the subclass hook.
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)
1594.2.21 by Robert Collins
Teach versioned files to prevent mutation after finishing.
321
2520.4.140 by Aaron Bentley
Use matching blocks from mpdiff for knit delta creation
322
    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Subclass hook performing the class specific add_lines work."""
        raise NotImplementedError(self.add_lines)
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
326
1596.2.32 by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.
327
    def add_lines_with_ghosts(self, version_id, parents, lines,
        parent_texts=None, nostore_sha=None, random_id=False,
        check_content=True, left_matching_blocks=None):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines and returns the same.
        """
        # Same write guard as add_lines, then delegate to the subclass hook.
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
            parent_texts, nostore_sha, random_id, check_content,
            left_matching_blocks)
1594.2.21 by Robert Collins
Teach versioned files to prevent mutation after finishing.
337
2794.1.1 by Robert Collins
Allow knits to be instructed not to add a text based on a sha, for commit.
338
    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts,
        nostore_sha, random_id, check_content, left_matching_blocks):
        """Subclass hook performing class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)
342
1563.2.19 by Robert Collins
stub out a check for knits.
343
    def check(self, progress_bar=None):
344
        """Check the versioned file for integrity."""
345
        raise NotImplementedError(self.check)
346
1666.1.6 by Robert Collins
Make knit the default format.
347
    def _check_lines_not_unicode(self, lines):
348
        """Check that lines being added to a versioned file are not unicode."""
349
        for line in lines:
350
            if line.__class__ is not str:
351
                raise errors.BzrBadParameterUnicode("lines")
352
353
    def _check_lines_are_lines(self, lines):
354
        """Check that the lines really are full lines without inline EOL."""
355
        for line in lines:
356
            if '\n' in line[:-1]:
357
                raise errors.BzrBadParameterContainsNewline("lines")
358
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
359
    def get_format_signature(self):
360
        """Get a text description of the data encoding in this file.
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
361
2831.7.1 by Ian Clatworthy
versionedfile.py code cleanups
362
        :since: 0.90
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
363
        """
364
        raise NotImplementedError(self.get_format_signature)
365
2520.4.41 by Aaron Bentley
Accelerate mpdiff generation
366
    def make_mpdiffs(self, version_ids):
        """Create multiparent diffs for specified versions."""
        # Collect the requested versions plus all of their parents.
        needed = set(version_ids)
        parent_map = self.get_parent_map(version_ids)
        for version_id in version_ids:
            try:
                needed.update(parent_map[version_id])
            except KeyError:
                raise errors.RevisionNotPresent(version_id, self)
        # We need to filter out ghosts, because we can't diff against them.
        present = set(self.get_parent_map(needed).keys())
        texts = dict(zip(present, self._get_lf_split_line_list(present)))
        diffs = []
        for version_id in version_ids:
            target = texts[version_id]
            try:
                parent_lines = [texts[p] for p in parent_map[version_id]
                                if p in present]
            except KeyError:
                # Believed unreachable: parent_map[version_id] succeeded in
                # the loop above and each texts[p] lookup is guarded by the
                # membership test, but keep the translation to
                # RevisionNotPresent just in case.
                raise errors.RevisionNotPresent(version_id, self)
            if parent_lines:
                left_blocks = self._extract_blocks(
                    version_id, parent_lines[0], target)
            else:
                left_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(
                target, parent_lines, left_blocks))
        return diffs
400
2520.4.48 by Aaron Bentley
Support getting blocks from knit deltas with no final EOL
401
    def _extract_blocks(self, version_id, source, target):
2520.4.41 by Aaron Bentley
Accelerate mpdiff generation
402
        return None
2520.4.3 by Aaron Bentley
Implement plain strategy for extracting and installing multiparent diffs
403
2520.4.61 by Aaron Bentley
Do bulk insertion of records
404
    def add_mpdiffs(self, records):
        """Install a batch of mpdiffs into this VersionedFile.

        Each record is an iterable of (version, parents, expected_sha1,
        mpdiff); mpdiff must be a MultiParent instance.  ``records`` is
        iterated several times, so it must be re-iterable.
        """
        # Does this need to call self._check_write_ok()? (IanC 20070919)
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        # Any parent referenced by a diff but not itself supplied as a diff
        # must be pulled out of this file as a fulltext.
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            for parent in parent_ids:
                if not mpvf.has_version(parent):
                    needed_parents.add(parent)
        present_parents = list(set(self.get_parent_map(needed_parents)))
        parent_line_lists = self._get_lf_split_line_list(present_parents)
        for parent_id, parent_lines in zip(present_parents, parent_line_lists):
            mpvf.add_version(parent_lines, parent_id, [])
        for record, lines in zip(records, mpvf.get_line_list(versions)):
            version, parent_ids, expected_sha1, mpdiff = record
            if len(parent_ids) == 1:
                # Reuse the diff's matching blocks as a hint for delta
                # compression against the left-hand parent.
                parent_length = mpvf.get_diff(parent_ids[0]).num_lines()
                left_matching_blocks = list(
                    mpdiff.get_matching_blocks(0, parent_length))
            else:
                left_matching_blocks = None
            try:
                _, _, version_text = self.add_lines_with_ghosts(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            except NotImplementedError:
                # The vf can't handle ghosts, so add lines normally, which will
                # (reasonably) fail if there are ghosts in the data.
                _, _, version_text = self.add_lines(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            vf_parents[version] = version_text
        # Validate every inserted text against the sha1 the caller expected.
        sha1s = self.get_sha1s(versions)
        for version, parent_ids, expected_sha1, mpdiff in records:
            if expected_sha1 != sha1s[version]:
                raise errors.VersionedFileInvalidChecksum(version)
2520.4.3 by Aaron Bentley
Implement plain strategy for extracting and installing multiparent diffs
447
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
448
    def get_text(self, version_id):
        """Return the contents of ``version_id`` as one string.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return ''.join(self.get_lines(version_id))
    # Historical alias kept for old callers.
    get_string = get_text
456
1756.2.1 by Aaron Bentley
Implement get_texts
457
    def get_texts(self, version_ids):
        """Return the full text of each listed version, as strings.

        Raises RevisionNotPresent if any version is missing from the
        file history.
        """
        return [''.join(lines) for lines in map(self.get_lines, version_ids)]
464
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
465
    def get_lines(self, version_id):
        """Return the contents of ``version_id`` as a sequence of lines.

        Abstract; concrete stores must implement it.  Implementations raise
        RevisionNotPresent when the version is not in the file history.
        """
        raise NotImplementedError(self.get_lines)
472
2520.4.90 by Aaron Bentley
Handle \r terminated lines in Weaves properly
473
    def _get_lf_split_line_list(self, version_ids):
474
        return [StringIO(t).readlines() for t in self.get_texts(version_ids)]
2520.4.3 by Aaron Bentley
Implement plain strategy for extracting and installing multiparent diffs
475
2530.1.1 by Aaron Bentley
Make topological sorting optional for get_ancestry
476
    def get_ancestry(self, version_ids, topo_sorted=True):
        """Return every ancestor of the given version(s).

        The null revision is never included.  The result is topologically
        sorted unless ``topo_sorted=False`` is passed.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history.  Abstract; subclasses implement the
        actual walk (a single string argument is normalised to a list
        before dispatch).
        """
        if isinstance(version_ids, basestring):
            version_ids = [version_ids]
        raise NotImplementedError(self.get_ancestry)
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
488
1594.2.8 by Robert Collins
add ghost aware apis to knits.
489
    def get_ancestry_with_ghosts(self, version_ids):
        """Return every ancestor of the given version(s), including ghosts.

        The null revision is never included.  Ghosts that are known about
        appear in the ancestry list but are not explicitly marked.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history.  Abstract.
        """
        raise NotImplementedError(self.get_ancestry_with_ghosts)
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
500
3287.5.1 by Robert Collins
Add VersionedFile.get_parent_map.
501
    def get_parent_map(self, version_ids):
        """Look up the parents of the given versions.

        :param version_ids: The version ids to look up parents for.
        :return: A mapping from version id to parents; absent versions are
            simply omitted from the result.
        """
        raise NotImplementedError(self.get_parent_map)
508
1594.2.8 by Robert Collins
add ghost aware apis to knits.
509
    def get_parents_with_ghosts(self, version_id):
        """Return the parent version names of ``version_id``.

        Known ghosts are included in the parent list but are not
        explicitly marked.

        :raise RevisionNotPresent: if ``version_id`` is not in the history
            (signalled by a KeyError from the parent-map lookup).
        """
        try:
            return list(self.get_parent_map([version_id])[version_id])
        except KeyError:
            raise errors.RevisionNotPresent(version_id, self)
1594.2.8 by Robert Collins
add ghost aware apis to knits.
522
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
523
    def annotate(self, version_id):
        """Return (version-id, line) tuples for the text of ``version_id``.

        :raise RevisionNotPresent: If the given version is
            not present in file history.
        """
        raise NotImplementedError(self.annotate)
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
530
2975.3.2 by Robert Collins
Review feedback - document the API change and improve readability in pack's _do_copy_nodes.
531
    def iter_lines_added_or_present_in_versions(self, version_ids=None,
                                                pb=None):
        """Iterate over the lines in the versioned file from version_ids.

        Lines from other versions may also be returned: each yielded item
        is a (line, version_id) tuple where the version is one the line is
        present in (not necessarily introduced in).  Result ordering is
        whatever suits the underlying storage format, and lines are
        normalised to end with \\n.

        If a progress bar is supplied it may be used to indicate progress;
        the caller is responsible for cleaning it up (this is an iterator).

        :return: An iterator over (line, version_id).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_versions)
552
1551.6.15 by Aaron Bentley
Moved plan_merge into Weave
553
    def plan_merge(self, ver_a, ver_b):
        """Return pseudo-annotation indicating how the two versions merge.

        This is computed between versions a and b and their common
        base.

        Weave lines present in none of them are skipped entirely.

        Legend:
        killed-base Dead in base revision
        killed-both Killed in each revision
        killed-a    Killed in a
        killed-b    Killed in b
        unchanged   Alive in both a and b (possibly created in both)
        new-a       Created in a
        new-b       Created in b
        ghost-a     Killed in a, unborn in b
        ghost-b     Killed in b, unborn in a
        irrelevant  Not in either revision
        """
        # Raise with the bound method, matching every other abstract method
        # in this class; referencing the class by name (the old
        # ``VersionedFile.plan_merge``) was inconsistent and misreports the
        # concrete subclass in the error.
        raise NotImplementedError(self.plan_merge)
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
574
1996.3.7 by John Arbash Meinel
lazy import versionedfile, late-load bzrlib.merge
575
    def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
                    b_marker=TextMerge.B_MARKER):
        """Weave-merge ``plan``, returning merged lines with conflict markers.

        Only the merged line list is returned; the conflict flag from
        PlanWeaveMerge.merge_lines() is discarded.
        """
        merger = PlanWeaveMerge(plan, a_marker, b_marker)
        return merger.merge_lines()[0]
1551.6.10 by Aaron Bentley
Renamed WeaveMerge to PlanMerge, added plan method, created planless WeaveMerge
578
1664.2.7 by Aaron Bentley
Merge bzr.dev
579
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
580
class RecordingVersionedFilesDecorator(object):
    """A minimal versioned-files wrapper that records calls made on it.

    Only the methods needed by existing tests are implemented; each one
    appends a description of the call to ``calls`` and then delegates to
    the backing versioned files.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf):
        """Create a decorator recording calls and delegating to backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        """
        self._backing_vf = backing_vf
        self.calls = []

    def add_lines(self, key, parents, lines, parent_texts=None,
                  left_matching_blocks=None, nostore_sha=None,
                  random_id=False, check_content=True):
        self.calls.append(("add_lines", key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content))
        return self._backing_vf.add_lines(key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

    def check(self):
        # Not recorded; simply forwarded (recursion into fallbacks happens
        # inside the backing vf).
        self._backing_vf.check()

    def get_parent_map(self, keys):
        self.calls.append(("get_parent_map", copy(keys)))
        return self._backing_vf.get_parent_map(keys)

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        self.calls.append(("get_record_stream", list(keys), sort_order,
            include_delta_closure))
        return self._backing_vf.get_record_stream(keys, sort_order,
            include_delta_closure)

    def get_sha1s(self, keys):
        self.calls.append(("get_sha1s", copy(keys)))
        return self._backing_vf.get_sha1s(keys)

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        self.calls.append(("iter_lines_added_or_present_in_keys", copy(keys)))
        return self._backing_vf.iter_lines_added_or_present_in_keys(keys, pb=pb)

    def keys(self):
        self.calls.append(("keys",))
        return self._backing_vf.keys()
629
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
630
3871.4.1 by John Arbash Meinel
Add a VFDecorator that can yield records in a specified order
631
class OrderingVersionedFilesDecorator(RecordingVersionedFilesDecorator):
    """A VF that records calls and returns keys in a specific order.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf, key_priority):
        """Create a decorator of backing_vf with a defined key order.

        :param backing_vf: The versioned file to answer all methods.
        :param key_priority: A dictionary defining what order keys should be
            returned from an 'unordered' get_record_stream request.
            Keys with lower priority are returned first, keys not present in
            the map get an implicit priority of 0, and are returned in
            lexicographical order.
        """
        RecordingVersionedFilesDecorator.__init__(self, backing_vf)
        self._key_priority = key_priority

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        self.calls.append(("get_record_stream", list(keys), sort_order,
            include_delta_closure))
        if sort_order == 'unordered':
            # Impose a deterministic order by requesting keys one at a time
            # from the backing vf, sorted by (priority, key).
            ordered = sorted(
                keys, key=lambda k: (self._key_priority.get(k, 0), k))
            for key in ordered:
                for record in self._backing_vf.get_record_stream([key],
                                'unordered', include_delta_closure):
                    yield record
        else:
            for record in self._backing_vf.get_record_stream(keys, sort_order,
                            include_delta_closure):
                yield record
667
668
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
669
class KeyMapper(object):
    """KeyMappers map between keys and underlying partitioned storage."""

    def map(self, key):
        """Map key to an underlying storage identifier.

        :param key: A key tuple e.g. ('file-id', 'revision-id').
        :return: An underlying storage identifier, specific to the
            partitioning mechanism.
        """
        raise NotImplementedError(self.map)

    def unmap(self, partition_id):
        """Map a partitioned storage id back to a key prefix.

        :param partition_id: The underlying partition id.
        :return: As much of a key (or prefix) as is derivable from the
            partition id.
        """
        raise NotImplementedError(self.unmap)
689
690
691
class ConstantMapper(KeyMapper):
    """A key mapper that maps every key to one constant result."""

    def __init__(self, result):
        """Create a ConstantMapper which will return result for all maps."""
        self._result = result

    def map(self, key):
        """See KeyMapper.map()."""
        return self._result
701
702
703
class URLEscapeMapper(KeyMapper):
    """Base class for use with transport backed storage.

    Wraps the subclass-provided _map/_unmap so that outputs are url-escaped
    and inputs url-unescaped, keeping partition ids filesystem/transport
    safe.
    """

    def map(self, key):
        """See KeyMapper.map()."""
        return urllib.quote(self._map(key))

    def unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return self._unmap(urllib.unquote(partition_id))
717
718
719
class PrefixMapper(URLEscapeMapper):
    """A key mapper that uses the first component of a key as the partition.

    This mapper is for use with a transport based backend.
    """

    def _map(self, key):
        """See KeyMapper.map()."""
        return key[0]

    def _unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return (partition_id,)
732
733
734
class HashPrefixMapper(URLEscapeMapper):
    """A key mapper that prepends an adler32-based hash bucket to the prefix.

    This mapper is for use with a transport based backend.
    """

    def _map(self, key):
        """See KeyMapper.map()."""
        prefix = self._escape(key[0])
        # One of 256 buckets, derived from the low byte of the adler32.
        return "%02x/%s" % (adler32(prefix) & 0xff, prefix)

    def _escape(self, prefix):
        """No escaping needed here."""
        return prefix

    def _unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        # Drop the hash-bucket directory; only the basename carries the key.
        return (self._unescape(osutils.basename(partition_id)),)

    def _unescape(self, basename):
        """No unescaping needed for HashPrefixMapper."""
        return basename
756
757
758
class HashEscapedPrefixMapper(HashPrefixMapper):
    """Combines the escaped first component of a key with a hash.

    This mapper is for use with a transport based backend.
    """

    _safe = "abcdefghijklmnopqrstuvwxyz0123456789-_@,."

    def _escape(self, prefix):
        """Turn a key element into a filesystem safe string.

        This is similar to a plain urllib.quote, except
        it uses specific safe characters, so that it doesn't
        have to translate a lot of valid file ids.
        """
        # @ does not get escaped. This is because it is a valid
        # filesystem character we use all the time, and it looks
        # a lot better than seeing %40 all the time.
        return ''.join(c if c in self._safe else '%%%02x' % ord(c)
                       for c in prefix)

    def _unescape(self, basename):
        """Escaped names are easily unescaped by urlutils."""
        return urllib.unquote(basename)
783
784
785
def make_versioned_files_factory(versioned_file_factory, mapper):
    """Create a ThunkedVersionedFiles factory.

    This will create a callable which when called creates a
    ThunkedVersionedFiles on a transport, using mapper to access individual
    versioned files, and versioned_file_factory to create each individual
    file.
    """
    def factory(transport):
        return ThunkedVersionedFiles(transport, versioned_file_factory,
                                     mapper, lambda: True)
    return factory
796
797
798
class VersionedFiles(object):
799
    """Storage for many versioned files.
800
801
    This object allows a single keyspace for accessing the history graph and
802
    contents of named bytestrings.
803
804
    Currently no implementation allows the graph of different key prefixes to
805
    intersect, but the API does allow such implementations in the future.
3350.6.7 by Robert Collins
Review feedback, making things more clear, adding documentation on what is used where.
806
807
    The keyspace is expressed via simple tuples. Any instance of VersionedFiles
808
    may have a different length key-size, but that size will be constant for
809
    all texts added to or retrieved from it. For instance, bzrlib uses
810
    instances with a key-size of 2 for storing user files in a repository, with
811
    the first element the fileid, and the second the version of that file.
812
813
    The use of tuples allows a single code base to support several different
814
    uses with only the mapping logic changing from instance to instance.
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
815
    """
816
817
    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a text to the store.

        :param key: The key tuple of the text to add. If the last element is
            None, a CHK string will be generated during the addition.
        :param parents: The parents key tuples of the text to add.
        :param lines: A list of lines. Each line must be a bytestring, all
            lines except the last must end with \\n and contain no other
            \\n's, and the last may end with at most one \\n. Violating this
            may appear to succeed but leaves the data unreadable; checking
            is expensive and only done when check_content is True.
        :param parent_texts: An optional dictionary of the opaque
            representations of some or all parents, to enable delta
            optimisations. VERY IMPORTANT: the texts must be those returned
            by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent, in
            SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True the key was randomly generated (not
            converted deterministically from a foreign VCS), so the backend
            may skip checking key uniqueness.
        :param check_content: If True, verify that the supplied lines are
            correctly formed bytestring lines.
        :return: The text sha1, the number of bytes in the text, and an
            opaque representation of the inserted version which can be
            provided back to future add_lines calls in the parent_texts
            dictionary.
        """
        raise NotImplementedError(self.add_lines)
855
4398.8.6 by John Arbash Meinel
Switch the api from VF.add_text to VF._add_text and trim some extra 'features'.
856
    def _add_text(self, key, parents, text, nostore_sha=None, random_id=False):
        """Add a text to the store, from a single string.

        This is a private function for use by CommitBuilder.

        :param key: The key tuple of the text to add. If the last element is
            None, a CHK string will be generated during the addition.
        :param parents: The parents key tuples of the text to add.
        :param text: A string containing the text to be committed.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True the key was randomly generated, so the
            backend may skip checking key uniqueness.
        :return: The text sha1, the number of bytes in the text, and an
            opaque representation of the inserted version which can be
            provided back to future _add_text calls in the parent_texts
            dictionary.
        """
        # Default implementation: split the string and thunk over to
        # add_lines(). Inefficient, but correct for any backend.
        return self.add_lines(key, parents, osutils.split_lines(text),
                              nostore_sha=nostore_sha,
                              random_id=random_id,
                              check_content=True)
4398.8.1 by John Arbash Meinel
Add a VersionedFile.add_text() api.
885
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
886
    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.  ``records`` is
        iterated more than once, so it must be re-iterable.

        :raise errors.VersionedFileInvalidChecksum: if an inserted text's
            sha1 does not match its record's expected_sha1.
        """
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        # It seems likely that adding all the present parents as fulltexts can
        # easily exhaust memory.
        chunks_to_lines = osutils.chunks_to_lines
        for record in self.get_record_stream(needed_parents, 'unordered',
            True):
            if record.storage_kind == 'absent':
                continue
            mpvf.add_version(chunks_to_lines(record.get_bytes_as('chunked')),
                record.key, [])
        for (key, parent_keys, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_keys) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_keys[0]).num_lines()))
            else:
                left_matching_blocks = None
            version_sha1, _, version_text = self.add_lines(key,
                parent_keys, lines, vf_parents,
                left_matching_blocks=left_matching_blocks)
            if version_sha1 != expected_sha1:
                # Bug fix: previously raised with ``version``, a stale loop
                # variable from the needed_parents loop above, so the error
                # named the wrong text (this loop unpacks into ``key``).
                raise errors.VersionedFileInvalidChecksum(key)
            vf_parents[key] = version_text
924
925
    def annotate(self, key):
        """Return (version-key, line) tuples for the text of key.

        Concrete stores must provide an implementation.

        :raise RevisionNotPresent: If the key is not present.
        """
        raise NotImplementedError(self.annotate)
931
932
    def check(self, progress_bar=None):
        """Verify the integrity of this object.

        Concrete stores must provide an implementation.
        """
        raise NotImplementedError(self.check)
935
936
    @staticmethod
    def check_not_reserved_id(version_id):
        """Validate that version_id is not a reserved id.

        Delegates to the revision module's checker, which raises when given
        a reserved identifier.
        """
        revision.check_not_reserved_id(version_id)
939
940
    def _check_lines_not_unicode(self, lines):
941
        """Check that lines being added to a versioned file are not unicode."""
942
        for line in lines:
943
            if line.__class__ is not str:
944
                raise errors.BzrBadParameterUnicode("lines")
945
946
    def _check_lines_are_lines(self, lines):
947
        """Check that the lines really are full lines without inline EOL."""
948
        for line in lines:
949
            if '\n' in line[:-1]:
950
                raise errors.BzrBadParameterContainsNewline("lines")
951
952
    def get_parent_map(self, keys):
        """Determine the parents of a set of keys.

        :param keys: The keys to look up parents for.
        :return: A mapping from keys to parents; keys absent from the store
            are simply omitted from the mapping.
        """
        raise NotImplementedError(self.get_parent_map)
960
961
    def get_record_stream(self, keys, ordering, include_delta_closure):
        """Produce a stream of records for keys.

        :param keys: The keys to include.
        :param ordering: Either 'unordered' or 'topological'.  In a
            topologically sorted stream, compression parents appear strictly
            before their children.
        :param include_delta_closure: If True, the closure across any
            compression parents is included (in the opaque data).
        :return: An iterator of ContentFactory objects; each one is only
            valid until the iterator is advanced.
        """
        raise NotImplementedError(self.get_record_stream)
974
975
    def get_sha1s(self, keys):
        """Look up the sha1 digests of the texts named by keys.

        :param keys: The names of the keys to lookup
        :return: a dict mapping key to sha1 digest.  Keys whose texts are
            not present in the store do not appear in the returned
            dictionary.
        """
        raise NotImplementedError(self.get_sha1s)
984
3830.3.12 by Martin Pool
Review cleanups: unify has_key impls, add missing_keys(), clean up exception blocks
985
    # Shared presence test from the index module; per its name it is
    # implemented in terms of get_parent_map (see also missing_keys below).
    has_key = index._has_key_from_parent_map
986
4009.3.3 by Andrew Bennetts
Add docstrings.
987
    def get_missing_compression_parent_keys(self):
        """Return an iterable of keys of missing compression parents.

        Intended to be called after insert_record_stream: if any compression
        parents were missing, the records depending on them could not be
        inserted safely.  The precise behaviour depends on the concrete
        VersionedFiles class in use.

        Classes that do not support this will raise NotImplementedError.
        """
        raise NotImplementedError(self.get_missing_compression_parent_keys)
998
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
999
    def insert_record_stream(self, stream):
        """Insert a record stream into this container.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        # Name the unimplemented method, for consistency with every other
        # stub in this interface (get_record_stream, get_sha1s, keys, ...).
        raise NotImplementedError(self.insert_record_stream)
1007
1008
    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """Iterate over the lines of the texts named by keys.

        Lines from other keys may also be returned.  Each item the iterator
        yields is a (line, key) pair, where the key names a text the line is
        present in (not necessarily the text that introduced it).

        Results come back in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES:
         * Lines are normalised by the underlying store: they will all have \n
           terminators.
         * Lines are returned in arbitrary order.

        :return: An iterator over (line, key).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_keys)
1030
1031
    def keys(self):
        """Return an iterable over the keys of all contained texts."""
        raise NotImplementedError(self.keys)
1034
1035
    def make_mpdiffs(self, keys):
        """Create multiparent diffs for specified keys.

        :param keys: The keys to produce diffs for; the result list is in
            the same order as the input.
        :return: A list of multiparent.MultiParent objects, one per key.
        :raises errors.RevisionNotPresent: if any requested key is absent.
        """
        # Remember the caller's ordering before deduplicating.
        keys_order = tuple(keys)
        keys = frozenset(keys)
        # knit_keys will grow to include the parents we must fetch as well.
        knit_keys = set(keys)
        parent_map = self.get_parent_map(keys)
        for parent_keys in parent_map.itervalues():
            if parent_keys:
                knit_keys.update(parent_keys)
        # Any requested key missing from the parent map is absent entirely.
        missing_keys = keys - set(parent_map)
        if missing_keys:
            raise errors.RevisionNotPresent(list(missing_keys)[0], self)
        # We need to filter out ghosts, because we can't diff against them.
        maybe_ghosts = knit_keys - keys
        ghosts = maybe_ghosts - set(self.get_parent_map(maybe_ghosts))
        knit_keys.difference_update(ghosts)
        lines = {}
        # Bind once; called for every streamed record below.
        chunks_to_lines = osutils.chunks_to_lines
        for record in self.get_record_stream(knit_keys, 'topological', True):
            lines[record.key] = chunks_to_lines(record.get_bytes_as('chunked'))
            # line_block_dict = {}
            # for parent, blocks in record.extract_line_blocks():
            #   line_blocks[parent] = blocks
            # line_blocks[record.key] = line_block_dict
        diffs = []
        for key in keys_order:
            target = lines[key]
            parents = parent_map[key] or []
            # Note that filtering knit_keys can lead to a parent difference
            # between the creation and the application of the mpdiff.
            parent_lines = [lines[p] for p in parents if p in knit_keys]
            if len(parent_lines) > 0:
                # Subclass hook; may supply precomputed matching blocks for
                # the leftmost parent (the default returns None).
                left_parent_blocks = self._extract_blocks(key, parent_lines[0],
                    target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target,
                parent_lines, left_parent_blocks))
        return diffs
1074
3830.3.12 by Martin Pool
Review cleanups: unify has_key impls, add missing_keys(), clean up exception blocks
1075
    # Shared helper from the index module; per its name it derives the set
    # of absent keys from get_parent_map (counterpart of has_key above).
    missing_keys = index._missing_keys_from_parent_map
1076
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1077
    def _extract_blocks(self, version_id, source, target):
        """Hook for subclasses: return precomputed matching blocks between
        source and target lines for version_id, or None when unavailable.

        Used by make_mpdiffs as the left-parent blocks; the base
        implementation provides none.
        """
        return None
1079
1080
1081
class ThunkedVersionedFiles(VersionedFiles):
    """Storage for many versioned files thunked onto a 'VersionedFile' class.

    This object allows a single keyspace for accessing the history graph and
    contents of named bytestrings.

    Currently no implementation allows the graph of different key prefixes to
    intersect, but the API does allow such implementations in the future.
    """

    def __init__(self, transport, file_factory, mapper, is_locked):
        """Create a ThunkedVersionedFiles.

        :param transport: The transport the component files live on.
        :param file_factory: A callable creating a VersionedFile for a path.
        :param mapper: Maps key prefixes to relative paths and back.
        :param is_locked: A callable returning whether access is permitted.
        """
        self._transport = transport
        self._file_factory = file_factory
        self._mapper = mapper
        self._is_locked = is_locked

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """See VersionedFiles.add_lines()."""
        path = self._mapper.map(key)
        version_id = key[-1]
        parents = [parent[-1] for parent in parents]
        vf = self._get_vf(path)
        try:
            return self._add_lines_to_vf(vf, version_id, parents, lines,
                parent_texts, left_matching_blocks, nostore_sha, random_id,
                check_content)
        except errors.NoSuchFile:
            # parent directory may be missing, try again.
            self._transport.mkdir(osutils.dirname(path))
            return self._add_lines_to_vf(vf, version_id, parents, lines,
                parent_texts, left_matching_blocks, nostore_sha, random_id,
                check_content)

    def _add_lines_to_vf(self, vf, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Add lines to vf, preferring the ghost-aware API when available.

        Falls back to plain add_lines for VersionedFile implementations
        that do not support ghosts.
        """
        try:
            return vf.add_lines_with_ghosts(version_id, parents, lines,
                parent_texts=parent_texts,
                left_matching_blocks=left_matching_blocks,
                nostore_sha=nostore_sha, random_id=random_id,
                check_content=check_content)
        except NotImplementedError:
            return vf.add_lines(version_id, parents, lines,
                parent_texts=parent_texts,
                left_matching_blocks=left_matching_blocks,
                nostore_sha=nostore_sha, random_id=random_id,
                check_content=check_content)

    def annotate(self, key):
        """Return a list of (version-key, line) tuples for the text of key.

        :raise RevisionNotPresent: If the key is not present.
        """
        prefix = key[:-1]
        path = self._mapper.map(prefix)
        vf = self._get_vf(path)
        origins = vf.annotate(key[-1])
        result = []
        for origin, line in origins:
            # Re-qualify component-local origins with this key's prefix.
            result.append((prefix + (origin,), line))
        return result

    def check(self, progress_bar=None):
        """See VersionedFiles.check()."""
        for prefix, vf in self._iter_all_components():
            vf.check()

    def get_parent_map(self, keys):
        """Get a map of the parents of keys.

        :param keys: The keys to look up parents for.
        :return: A mapping from keys to parents. Absent keys are absent from
            the mapping.
        """
        prefixes = self._partition_keys(keys)
        result = {}
        for prefix, suffixes in prefixes.items():
            path = self._mapper.map(prefix)
            vf = self._get_vf(path)
            parent_map = vf.get_parent_map(suffixes)
            for key, parents in parent_map.items():
                result[prefix + (key,)] = tuple(
                    prefix + (parent,) for parent in parents)
        return result

    def _get_vf(self, path):
        # All access must happen while locked.
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        return self._file_factory(path, self._transport, create=True,
            get_scope=lambda: None)

    def _partition_keys(self, keys):
        """Turn keys into a dict of prefix:suffix_list."""
        result = {}
        for key in keys:
            prefix_keys = result.setdefault(key[:-1], [])
            prefix_keys.append(key[-1])
        return result

    def _get_all_prefixes(self):
        # Identify all key prefixes.
        # XXX: A bit hacky, needs polish.
        if type(self._mapper) == ConstantMapper:
            paths = [self._mapper.map(())]
            prefixes = [()]
        else:
            relpaths = set()
            for quoted_relpath in self._transport.iter_files_recursive():
                path, ext = os.path.splitext(quoted_relpath)
                relpaths.add(path)
            paths = list(relpaths)
            prefixes = [self._mapper.unmap(path) for path in paths]
        return zip(paths, prefixes)

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream()."""
        # Ordering will be taken care of by each partitioned store; group keys
        # by partition.
        keys = sorted(keys)
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            suffixes = [(suffix,) for suffix in suffixes]
            for record in vf.get_record_stream(suffixes, ordering,
                include_delta_closure):
                # Re-qualify the component-local key and parents with the
                # prefix before yielding into the unified keyspace.
                if record.parents is not None:
                    record.parents = tuple(
                        prefix + parent for parent in record.parents)
                record.key = prefix + record.key
                yield record

    def _iter_keys_vf(self, keys):
        """Yield (prefix, suffixes, vf) for each partition of keys."""
        prefixes = self._partition_keys(keys)
        for prefix, suffixes in prefixes.items():
            path = self._mapper.map(prefix)
            vf = self._get_vf(path)
            yield prefix, suffixes, vf

    def get_sha1s(self, keys):
        """See VersionedFiles.get_sha1s()."""
        sha1s = {}
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            vf_sha1s = vf.get_sha1s(suffixes)
            for suffix, sha1 in vf_sha1s.iteritems():
                sha1s[prefix + (suffix,)] = sha1
        return sha1s

    def insert_record_stream(self, stream):
        """Insert a record stream into this container.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        for record in stream:
            prefix = record.key[:-1]
            key = record.key[-1:]
            if record.parents is not None:
                parents = [parent[-1:] for parent in record.parents]
            else:
                parents = None
            thunk_record = AdapterFactory(key, parents, record)
            path = self._mapper.map(prefix)
            # Note that this parses the file many times; we can do better but
            # as this only impacts weaves in terms of performance, it is
            # tolerable.
            vf = self._get_vf(path)
            vf.insert_record_stream([thunk_record])

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """Iterate over the lines in the versioned files from keys.

        This may return lines from other keys. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES:
         * Lines are normalised by the underlying store: they will all have \n
           terminators.
         * Lines are returned in arbitrary order.

        :return: An iterator over (line, key).
        """
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            for line, version in vf.iter_lines_added_or_present_in_versions(suffixes):
                yield line, prefix + (version,)

    def _iter_all_components(self):
        """Yield (prefix, vf) for every component store."""
        for path, prefix in self._get_all_prefixes():
            yield prefix, self._get_vf(path)

    def keys(self):
        """See VersionedFiles.keys()."""
        result = set()
        for prefix, vf in self._iter_all_components():
            for suffix in vf.versions():
                result.add(prefix + (suffix,))
        return result
1291
1292
1293
class _PlanMergeVersionedFile(VersionedFiles):
    """A VersionedFile for uncommitted and committed texts.

    It is intended to allow merges to be planned with working tree texts.
    It implements only the small part of the VersionedFiles interface used by
    PlanMerge.  It falls back to multiple versionedfiles for data not stored in
    _PlanMergeVersionedFile itself.

    :ivar fallback_versionedfiles: a list of VersionedFiles objects that can
        be queried for missing texts.
    """

    def __init__(self, file_id):
        """Create a _PlanMergeVersionedFile.

        :param file_id: Used with _PlanMerge code which is not yet fully
            tuple-keyspace aware.
        """
        self._file_id = file_id
        # fallback locations
        self.fallback_versionedfiles = []
        # Parents for locally held keys.
        self._parents = {}
        # line data for locally held keys.
        self._lines = {}
        # key lookup providers; fallbacks are appended on each
        # get_parent_map call (see that method).
        self._providers = [DictParentsProvider(self._parents)]

    def plan_merge(self, ver_a, ver_b, base=None):
        """See VersionedFile.plan_merge"""
        # Imported here to avoid a module-level import cycle with
        # bzrlib.merge.
        from bzrlib.merge import _PlanMerge
        if base is None:
            return _PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge()
        # With a base: subtract the a-vs-base plan from the a-vs-b plan.
        old_plan = list(_PlanMerge(ver_a, base, self, (self._file_id,)).plan_merge())
        new_plan = list(_PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge())
        return _PlanMerge._subtract_plans(old_plan, new_plan)

    def plan_lca_merge(self, ver_a, ver_b, base=None):
        """Plan an LCA merge of ver_a and ver_b, optionally against base."""
        from bzrlib.merge import _PlanLCAMerge
        graph = Graph(self)
        new_plan = _PlanLCAMerge(ver_a, ver_b, self, (self._file_id,), graph).plan_merge()
        if base is None:
            return new_plan
        old_plan = _PlanLCAMerge(ver_a, base, self, (self._file_id,), graph).plan_merge()
        return _PlanLCAMerge._subtract_plans(list(old_plan), list(new_plan))

    def add_lines(self, key, parents, lines):
        """See VersionedFiles.add_lines

        Lines are added locally, not to fallback versionedfiles.  Also, ghosts
        are permitted.  Only reserved ids are permitted.
        """
        if type(key) is not tuple:
            raise TypeError(key)
        if not revision.is_reserved_id(key[-1]):
            raise ValueError('Only reserved ids may be used')
        if parents is None:
            raise ValueError('Parents may not be None')
        if lines is None:
            raise ValueError('Lines may not be None')
        self._parents[key] = tuple(parents)
        self._lines[key] = lines

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream.

        Locally held texts are yielded first as chunked fulltexts; anything
        still pending is then sought in each fallback in turn.  Note that
        `ordering` and `include_delta_closure` are not honoured for local
        texts, and fallbacks are always queried unordered.
        """
        pending = set(keys)
        for key in keys:
            if key in self._lines:
                lines = self._lines[key]
                parents = self._parents[key]
                pending.remove(key)
                yield ChunkedContentFactory(key, parents, None, lines)
        for versionedfile in self.fallback_versionedfiles:
            for record in versionedfile.get_record_stream(
                pending, 'unordered', True):
                if record.storage_kind == 'absent':
                    continue
                else:
                    pending.remove(record.key)
                    yield record
            if not pending:
                return
        # report absent entries
        for key in pending:
            yield AbsentContentFactory(key)

    def get_parent_map(self, keys):
        """See VersionedFiles.get_parent_map"""
        # We create a new provider because a fallback may have been added.
        # If we make fallbacks private we can update a stack list and avoid
        # object creation thrashing.
        keys = set(keys)
        result = {}
        if revision.NULL_REVISION in keys:
            # NULL_REVISION is handled locally: it has no parents.
            keys.remove(revision.NULL_REVISION)
            result[revision.NULL_REVISION] = ()
        self._providers = self._providers[:1] + self.fallback_versionedfiles
        result.update(
            StackedParentsProvider(self._providers).get_parent_map(keys))
        # Rewrite empty parent tuples as (NULL_REVISION,); only values are
        # reassigned, so iterating while updating is safe here.
        for key, parents in result.iteritems():
            if parents == ():
                result[key] = (revision.NULL_REVISION,)
        return result
3144.3.1 by Aaron Bentley
Implement LCA merge, with problematic conflict markers
1395
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
1396
1551.6.10 by Aaron Bentley
Renamed WeaveMerge to PlanMerge, added plan method, created planless WeaveMerge
1397
class PlanWeaveMerge(TextMerge):
    """Weave merge that takes a plan as its input.

    This exists so that VersionedFile.plan_merge is implementable.
    Most callers will want to use WeaveMerge instead.
    """

    def __init__(self, plan, a_marker=TextMerge.A_MARKER,
                 b_marker=TextMerge.B_MARKER):
        TextMerge.__init__(self, a_marker, b_marker)
        self.plan = plan

    def _merge_struct(self):
        queued_a = []
        queued_b = []
        changed_a = changed_b = False

        def flush_queued():
            # Emit whatever has accumulated since the last sync point.  This
            # generator closes over the *current* bindings of the queues and
            # change flags, so it reflects any rebinding done by the loop
            # below each time it is invoked.
            if not queued_a and not queued_b:
                return
            if changed_a and not changed_b:
                # one-sided change:
                yield (queued_a,)
            elif changed_b and not changed_a:
                yield (queued_b,)
            elif queued_a == queued_b:
                yield (queued_a,)
            else:
                yield (queued_a, queued_b)

        # We previously considered either 'unchanged' or 'killed-both' lines
        # to be possible places to resynchronize.  However, assuming agreement
        # on killed-both lines may be too aggressive. -- mbp 20060324
        for state, line in self.plan:
            if state == 'unchanged':
                # Sync point: flush any queued conflict region, then reset
                # the accumulators before emitting the common line.
                for struct in flush_queued():
                    yield struct
                queued_a = []
                queued_b = []
                changed_a = changed_b = False
                if line:
                    yield ([line],)
            elif state == 'killed-a':
                changed_a = True
                queued_b.append(line)
            elif state == 'killed-b':
                changed_b = True
                queued_a.append(line)
            elif state == 'new-a':
                changed_a = True
                queued_a.append(line)
            elif state == 'new-b':
                changed_b = True
                queued_b.append(line)
            elif state == 'conflicted-a':
                changed_a = changed_b = True
                queued_a.append(line)
            elif state == 'conflicted-b':
                changed_a = changed_b = True
                queued_b.append(line)
            elif state == 'killed-both':
                # This counts as a change on both sides, even though there is
                # no associated line.
                changed_a = changed_b = True
            elif state not in ('irrelevant', 'ghost-a', 'ghost-b',
                               'killed-base'):
                raise AssertionError(state)
        for struct in flush_queued():
            yield struct
1563.2.12 by Robert Collins
Checkpointing: created InterObject to factor out common inter object worker code, added InterVersionedFile and tests to allow making join work between any versionedfile.
1470
1664.2.14 by Aaron Bentley
spacing fix
1471
1551.6.10 by Aaron Bentley
Renamed WeaveMerge to PlanMerge, added plan method, created planless WeaveMerge
1472
class WeaveMerge(PlanWeaveMerge):
    """Weave merge that takes a VersionedFile and two versions as its input."""

    def __init__(self, versionedfile, ver_a, ver_b,
                 a_marker=PlanWeaveMerge.A_MARKER,
                 b_marker=PlanWeaveMerge.B_MARKER):
        # Derive the merge plan from the versioned file, then delegate the
        # actual merging to PlanWeaveMerge.
        merge_plan = versionedfile.plan_merge(ver_a, ver_b)
        PlanWeaveMerge.__init__(self, merge_plan, a_marker, b_marker)
1479
1480
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
1481
class VirtualVersionedFiles(VersionedFiles):
    """Dummy implementation for VersionedFiles that uses other functions for
    obtaining fulltexts and parent maps.

    This is always on the bottom of the stack and uses string keys
    (rather than tuples) internally.
    """

    def __init__(self, get_parent_map, get_lines):
        """Create a VirtualVersionedFiles.

        :param get_parent_map: Same signature as Repository.get_parent_map.
        :param get_lines: Should return lines for specified key or None if
                          not available.
        """
        super(VirtualVersionedFiles, self).__init__()
        self._get_parent_map = get_parent_map
        self._get_lines = get_lines

    def check(self, progressbar=None):
        """See VersionedFiles.check.

        :note: Always returns True for VirtualVersionedFiles.
        """
        return True

    def add_mpdiffs(self, records):
        """See VersionedFiles.mpdiffs.

        :note: Not implemented for VirtualVersionedFiles.
        """
        raise NotImplementedError(self.add_mpdiffs)

    def get_parent_map(self, keys):
        """See VersionedFiles.get_parent_map."""
        # Unwrap the 1-tuple keys for the underlying map, then re-wrap both
        # keys and their parents as 1-tuples on the way back out.
        plain_keys = [key for (key,) in keys]
        wrapped = {}
        for key, parents in self._get_parent_map(plain_keys).iteritems():
            wrapped[(key,)] = tuple((parent,) for parent in parents)
        return wrapped

    def get_sha1s(self, keys):
        """See VersionedFiles.get_sha1s."""
        sha1s = {}
        for (key,) in keys:
            lines = self._get_lines(key)
            if lines is None:
                # Unavailable keys are simply omitted from the result.
                continue
            if not isinstance(lines, list):
                raise AssertionError
            sha1s[(key,)] = osutils.sha_strings(lines)
        return sha1s

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream."""
        for (key,) in list(keys):
            lines = self._get_lines(key)
            if lines is None:
                yield AbsentContentFactory((key,))
                continue
            if not isinstance(lines, list):
                raise AssertionError
            yield ChunkedContentFactory(
                (key,), None, sha1=osutils.sha_strings(lines), chunks=lines)

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """See VersionedFile.iter_lines_added_or_present_in_versions()."""
        for index, (key,) in enumerate(keys):
            if pb is not None:
                pb.update("Finding changed lines", index, len(keys))
            for line in self._get_lines(key):
                yield (line, key)
4005.3.2 by Robert Collins
First passing NetworkRecordStream test - a fulltext from any record type which isn't a chunked or fulltext can be serialised and deserialised successfully.
1550
1551
1552
def network_bytes_to_kind_and_offset(network_bytes):
    """Strip off a record kind from the front of network_bytes.

    The kind is everything up to (but not including) the first newline.

    :param network_bytes: The bytes of a record.
    :return: A tuple (storage_kind, offset_of_remaining_bytes)
    """
    newline_at = network_bytes.find('\n')
    return network_bytes[:newline_at], newline_at + 1
1561
1562
1563
class NetworkRecordStream(object):
    """A record_stream which reconstitutes a serialised stream."""

    def __init__(self, bytes_iterator):
        """Create a NetworkRecordStream.

        :param bytes_iterator: An iterator of bytes. Each item in this
            iterator should have been obtained from a record_streams'
            record.get_bytes_as(record.storage_kind) call.
        """
        self._bytes_iterator = bytes_iterator
        # Map each serialised storage kind to the parser that converts its
        # network bytes back into record objects.
        self._kind_factory = {
            'knit-ft-gz': knit.knit_network_to_record,
            'knit-delta-gz': knit.knit_network_to_record,
            'knit-annotated-ft-gz': knit.knit_network_to_record,
            'knit-annotated-delta-gz': knit.knit_network_to_record,
            'knit-delta-closure': knit.knit_delta_closure_to_records,
            'fulltext': fulltext_network_to_record,
            'groupcompress-block': groupcompress.network_block_to_records,
            }

    def read(self):
        """Read the stream.

        :return: An iterator as per VersionedFiles.get_record_stream().
        """
        for record_bytes in self._bytes_iterator:
            storage_kind, line_end = network_bytes_to_kind_and_offset(
                record_bytes)
            factory = self._kind_factory[storage_kind]
            for record in factory(storage_kind, record_bytes, line_end):
                yield record
4022.1.6 by Robert Collins
Cherrypick and polish the RemoteSink for streaming push.
1593
1594
1595
def fulltext_network_to_record(kind, bytes, line_end):
    """Convert a network fulltext record to record.

    Layout after the kind line: a 4-byte big-endian metadata length, the
    bencoded (key, parents) metadata, then the fulltext content itself.
    """
    meta_len, = struct.unpack('!L', bytes[line_end:line_end + 4])
    meta_start = line_end + 4
    record_meta = bytes[meta_start:meta_start + meta_len]
    key, parents = bencode.bdecode_as_tuple(record_meta)
    if parents == 'nil':
        # 'nil' is the wire marker for a parents value of None.
        parents = None
    fulltext = bytes[meta_start + meta_len:]
    return [FulltextContentFactory(key, parents, None, fulltext)]
4022.1.6 by Robert Collins
Cherrypick and polish the RemoteSink for streaming push.
1604
1605
1606
def _length_prefix(bytes):
1607
    return struct.pack('!L', len(bytes))
1608
1609
4060.1.4 by Robert Collins
Streaming fetch from remote servers.
1610
def record_to_fulltext_bytes(record):
    """Serialise ``record`` as a 'fulltext' network record.

    The wire form is the kind line, a length-prefixed bencoded
    (key, parents) metadata block, then the fulltext content.
    """
    wire_parents = record.parents
    if wire_parents is None:
        # None has no bencode form; the wire uses the marker string 'nil'.
        wire_parents = 'nil'
    meta_bytes = bencode.bencode((record.key, wire_parents))
    content = record.get_bytes_as('fulltext')
    return "fulltext\n%s%s%s" % (
        _length_prefix(meta_bytes), meta_bytes, content)
4111.1.1 by Robert Collins
Add a groupcompress sort order.
1619
1620
1621
def sort_groupcompress(parent_map):
    """Sort and group the keys in parent_map into groupcompress order.

    groupcompress is defined (currently) as reverse-topological order, grouped
    by the key prefix.

    :return: A sorted-list of keys
    """
    # gc-optimal ordering is approximately reverse topological, properly
    # grouped by file-id: bucket the (key, parents) items by key prefix,
    # then reverse-toposort each bucket in prefix order.
    buckets = {}
    for item in parent_map.iteritems():
        key = item[0]
        if isinstance(key, str) or len(key) == 1:
            # Plain string keys and 1-tuples have no prefix component.
            prefix = ''
        else:
            prefix = key[0]
        buckets.setdefault(prefix, []).append(item)

    ordered_keys = []
    for prefix in sorted(buckets):
        ordered_keys.extend(reversed(tsort.topo_sort(buckets[prefix])))
    return ordered_keys