# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
#
# Authors:
#   Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

"""Versioned text file storage api."""

from copy import copy
from cStringIO import StringIO
import os
from zlib import adler32

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
import urllib

from bzrlib import (
    errors,
    index,
    osutils,
    multiparent,
    tsort,
    revision,
    ui,
    )
from bzrlib.graph import DictParentsProvider, Graph, _StackedParentsProvider
from bzrlib.transport.memory import MemoryTransport
""")
from bzrlib.inter import InterObject
from bzrlib.registry import Registry
from bzrlib.symbol_versioning import *
from bzrlib.textmerge import TextMerge


adapter_registry = Registry()
adapter_registry.register_lazy(('knit-delta-gz', 'fulltext'), 'bzrlib.knit',
    'DeltaPlainToFullText')
adapter_registry.register_lazy(('knit-ft-gz', 'fulltext'), 'bzrlib.knit',
    'FTPlainToFullText')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'knit-delta-gz'),
    'bzrlib.knit', 'DeltaAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'fulltext'),
    'bzrlib.knit', 'DeltaAnnotatedToFullText')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'knit-ft-gz'),
    'bzrlib.knit', 'FTAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'fulltext'),
    'bzrlib.knit', 'FTAnnotatedToFullText')
# adapter_registry.register_lazy(('knit-annotated-ft-gz', 'chunked'),
#     'bzrlib.knit', 'FTAnnotatedToChunked')
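
# Editor's note: an illustrative sketch, not part of the original module. An
# adapter class can be resolved from the registry by its (source storage kind,
# target storage kind) pair; the constructor and conversion signatures of the
# concrete adapters live in bzrlib.knit and are not assumed here.
#
#   adapter_cls = adapter_registry.get(('knit-annotated-ft-gz', 'fulltext'))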


class ContentFactory(object):
    """Abstract interface for insertion and retrieval from a VersionedFile.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. One of
        'mpdiff', 'knit-annotated-ft', 'knit-annotated-delta', 'knit-ft',
        'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
        'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self):
        """Create a ContentFactory."""
        self.sha1 = None
        self.storage_kind = None
        self.key = None
        self.parents = None


class ChunkedContentFactory(ContentFactory):
    """Static data content factory.

    This takes a 'chunked' list of strings. The only requirement on 'chunked'
    is that ''.join(chunked) becomes a valid fulltext. A tuple of a single
    string satisfies this, as does a list of lines.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. Always
        'chunked'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self, key, parents, sha1, chunks):
        """Create a ContentFactory."""
        self.sha1 = sha1
        self.storage_kind = 'chunked'
        self.key = key
        self.parents = parents
        self._chunks = chunks

    def get_bytes_as(self, storage_kind):
        if storage_kind == 'chunked':
            return self._chunks
        elif storage_kind == 'fulltext':
            return ''.join(self._chunks)
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)
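
# Editor's note: an illustrative sketch, not part of the original module. The
# key and chunk values below are arbitrary examples.
#
#   chunked = ChunkedContentFactory(('rev-1',), None, None,
#       ['hello ', 'world\n'])
#   chunked.get_bytes_as('fulltext')    # -> 'hello world\n'
#   chunked.get_bytes_as('chunked')     # -> ['hello ', 'world\n']
#   chunked.get_bytes_as('knit-ft-gz')  # raises errors.UnavailableRepresentation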


class FulltextContentFactory(ContentFactory):
    """Static data content factory.

    This takes a fulltext when created and just returns that during
    get_bytes_as('fulltext').

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. Always
        'fulltext'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self, key, parents, sha1, text):
        """Create a ContentFactory."""
        self.sha1 = sha1
        self.storage_kind = 'fulltext'
        self.key = key
        self.parents = parents
        self._text = text

    def get_bytes_as(self, storage_kind):
        if storage_kind == self.storage_kind:
            return self._text
        elif storage_kind == 'chunked':
            return (self._text,)
        raise errors.UnavailableRepresentation(self.key, storage_kind,
            self.storage_kind)


class AbsentContentFactory(ContentFactory):
    """A placeholder content factory for unavailable texts.

    :ivar sha1: None.
    :ivar storage_kind: 'absent'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: None.
    """

    def __init__(self, key):
        """Create a ContentFactory."""
        self.sha1 = None
        self.storage_kind = 'absent'
        self.key = key
        self.parents = None


class AdapterFactory(ContentFactory):
    """A content factory to adapt between key prefixes."""

    def __init__(self, key, parents, adapted):
        """Create an adapter factory instance."""
        self.key = key
        self.parents = parents
        self._adapted = adapted

    def __getattr__(self, attr):
        """Return a member from the adapted object."""
        if attr in ('key', 'parents'):
            return self.__dict__[attr]
        else:
            return getattr(self._adapted, attr)


def filter_absent(record_stream):
    """Adapt a record stream to remove absent records."""
    for record in record_stream:
        if record.storage_kind != 'absent':
            yield record
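
# Editor's note: an illustrative sketch, not part of the original module.
# filter_absent() can wrap any record stream to drop AbsentContentFactory
# placeholders; `vf` is assumed to be a VersionedFile implementation (see the
# class below).
#
#   stream = vf.get_record_stream(versions, 'unordered', True)
#   for record in filter_absent(stream):
#       text = record.get_bytes_as('fulltext')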


class VersionedFile(object):
    """Versioned text file storage.

    A versioned file manages versions of line-based text files,
    keeping track of the originating version for each line.

    To clients the "lines" of the file are represented as a list of
    strings. These strings will typically have terminal newline
    characters, but this is not required.  In particular files commonly
    do not have a newline at the end of the file.

    Texts are identified by a version-id string.
    """

    @staticmethod
    def check_not_reserved_id(version_id):
        revision.check_not_reserved_id(version_id)

    def copy_to(self, name, transport):
        """Copy this versioned file to name on transport."""
        raise NotImplementedError(self.copy_to)

    def get_record_stream(self, versions, ordering, include_delta_closure):
        """Get a stream of records for versions.

        :param versions: The versions to include. Each version is a tuple
            (version,).
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the data content of the
            stream, not in the emitted records). This guarantees that
            'fulltext' can be used successfully on every record.
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        raise NotImplementedError(self.get_record_stream)

    def has_version(self, version_id):
        """Returns whether version is present."""
        raise NotImplementedError(self.has_version)

    def insert_record_stream(self, stream):
        """Insert a record stream into this versioned file.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        raise NotImplementedError
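
    # Editor's note: an illustrative sketch, not part of the original module.
    # The two stream methods above are designed to be chained; `source` and
    # `target` are assumed VersionedFile implementations.
    #
    #   stream = source.get_record_stream(versions, 'topological', True)
    #   target.insert_record_stream(filter_absent(stream))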

    def add_lines(self, version_id, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a single text on top of the versioned file.

        Must raise RevisionAlreadyPresent if the new version is
        already present in file history.

        Must raise RevisionNotPresent if any of the given parents are
        not present in file history.

        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminating \n. If the lines list does not meet this constraint the
            add routine may error or may succeed - but you will be unable to
            read the data back accurately. (Checking the lines have been split
            correctly is expensive and extremely unlikely to catch bugs so it
            is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations.  VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent.  The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
                 representation of the inserted version which can be provided
                 back to future add_lines calls in the parent_texts dictionary.
        """
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)
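
    # Editor's note: an illustrative sketch, not part of the original module,
    # showing an add_lines() call with made-up version ids. The return value
    # unpacks as described in the docstring above.
    #
    #   sha1, num_bytes, parent_text = vf.add_lines('rev-2', ['rev-1'],
    #       ['first line\n', 'second line\n'])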

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Helper to do the class specific add_lines."""
        raise NotImplementedError(self.add_lines)

    def add_lines_with_ghosts(self, version_id, parents, lines,
        parent_texts=None, nostore_sha=None, random_id=False,
        check_content=True, left_matching_blocks=None):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines and returns the same.
        """
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
            parent_texts, nostore_sha, random_id, check_content,
            left_matching_blocks)

    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts,
        nostore_sha, random_id, check_content, left_matching_blocks):
        """Helper to do class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)

    def check(self, progress_bar=None):
        """Check the versioned file for integrity."""
        raise NotImplementedError(self.check)

    def _check_lines_not_unicode(self, lines):
        """Check that lines being added to a versioned file are not unicode."""
        for line in lines:
            if line.__class__ is not str:
                raise errors.BzrBadParameterUnicode("lines")

    def _check_lines_are_lines(self, lines):
        """Check that the lines really are full lines without inline EOL."""
        for line in lines:
            if '\n' in line[:-1]:
                raise errors.BzrBadParameterContainsNewline("lines")

    def get_format_signature(self):
        """Get a text description of the data encoding in this file.

        :since: 0.90
        """
        raise NotImplementedError(self.get_format_signature)

    def make_mpdiffs(self, version_ids):
        """Create multiparent diffs for specified versions."""
        knit_versions = set()
        knit_versions.update(version_ids)
        parent_map = self.get_parent_map(version_ids)
        for version_id in version_ids:
            try:
                knit_versions.update(parent_map[version_id])
            except KeyError:
                raise errors.RevisionNotPresent(version_id, self)
        # We need to filter out ghosts, because we can't diff against them.
        knit_versions = set(self.get_parent_map(knit_versions).keys())
        lines = dict(zip(knit_versions,
            self._get_lf_split_line_list(knit_versions)))
        diffs = []
        for version_id in version_ids:
            target = lines[version_id]
            try:
                parents = [lines[p] for p in parent_map[version_id]
                           if p in knit_versions]
            except KeyError:
                # I don't know how this could ever trigger.
                # parent_map[version_id] was already triggered in the previous
                # for loop, and lines[p] has the 'if p in knit_versions' check,
                # so we again won't have a KeyError.
                raise errors.RevisionNotPresent(version_id, self)
            if len(parents) > 0:
                left_parent_blocks = self._extract_blocks(version_id,
                    parents[0], target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target, parents,
                left_parent_blocks))
        return diffs

    def _extract_blocks(self, version_id, source, target):
        return None

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        # Does this need to call self._check_write_ok()? (IanC 20070919)
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        present_parents = set(self.get_parent_map(needed_parents).keys())
        for parent_id, lines in zip(present_parents,
            self._get_lf_split_line_list(present_parents)):
            mpvf.add_version(lines, parent_id, [])
        for (version, parent_ids, expected_sha1, mpdiff), lines in \
                zip(records, mpvf.get_line_list(versions)):
            if len(parent_ids) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_ids[0]).num_lines()))
            else:
                left_matching_blocks = None
            try:
                _, _, version_text = self.add_lines_with_ghosts(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            except NotImplementedError:
                # The vf can't handle ghosts, so add lines normally, which will
                # (reasonably) fail if there are ghosts in the data.
                _, _, version_text = self.add_lines(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            vf_parents[version] = version_text
        sha1s = self.get_sha1s(versions)
        for version, parent_ids, expected_sha1, mpdiff in records:
            if expected_sha1 != sha1s[version]:
                raise errors.VersionedFileInvalidChecksum(version)
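
    # Editor's note: an illustrative sketch, not part of the original module.
    # make_mpdiffs() and add_mpdiffs() are intended as a pair for moving texts
    # between versioned files as multiparent diffs; `source`, `target` and
    # `version_ids` are assumed names.
    #
    #   parent_map = source.get_parent_map(version_ids)
    #   sha1s = source.get_sha1s(version_ids)
    #   diffs = source.make_mpdiffs(version_ids)
    #   records = [(v, parent_map[v], sha1s[v], diff)
    #              for v, diff in zip(version_ids, diffs)]
    #   target.add_mpdiffs(records)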

    def get_text(self, version_id):
        """Return version contents as a text string.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return ''.join(self.get_lines(version_id))
    get_string = get_text

    def get_texts(self, version_ids):
        """Return the texts of listed versions as a list of strings.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return [''.join(self.get_lines(v)) for v in version_ids]

    def get_lines(self, version_id):
        """Return version contents as a sequence of lines.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        raise NotImplementedError(self.get_lines)

    def _get_lf_split_line_list(self, version_ids):
        return [StringIO(t).readlines() for t in self.get_texts(version_ids)]

    def get_ancestry(self, version_ids, topo_sorted=True):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        This list will not be topologically sorted if topo_sorted=False is
        passed.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history."""
        if isinstance(version_ids, basestring):
            version_ids = [version_ids]
        raise NotImplementedError(self.get_ancestry)

    def get_ancestry_with_ghosts(self, version_ids):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history.

        Ghosts that are known about will be included in ancestry list,
        but are not explicitly marked.
        """
        raise NotImplementedError(self.get_ancestry_with_ghosts)

    def get_parent_map(self, version_ids):
        """Get a map of the parents of version_ids.

        :param version_ids: The version ids to look up parents for.
        :return: A mapping from version id to parents.
        """
        raise NotImplementedError(self.get_parent_map)

    def get_parents_with_ghosts(self, version_id):
        """Return version names for parents of version_id.

        Will raise RevisionNotPresent if version_id is not present
        in the history.

        Ghosts that are known about will be included in the parent list,
        but are not explicitly marked.
        """
        try:
            return list(self.get_parent_map([version_id])[version_id])
        except KeyError:
            raise errors.RevisionNotPresent(version_id, self)

    def annotate(self, version_id):
        """Return a list of (version-id, line) tuples for version_id.

        :raise RevisionNotPresent: If the given version is
        not present in file history.
        """
        raise NotImplementedError(self.annotate)

    def iter_lines_added_or_present_in_versions(self, version_ids=None,
                                                pb=None):
        """Iterate over the lines in the versioned file from version_ids.

        This may return lines from other versions. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES: Lines are normalised: they will all have \n terminators.
               Lines are returned in arbitrary order.

        :return: An iterator over (line, version_id).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_versions)

    def plan_merge(self, ver_a, ver_b):
        """Return pseudo-annotation indicating how the two versions merge.

        This is computed between versions a and b and their common
        base.

        Weave lines present in none of them are skipped entirely.

        Legend:
        killed-base Dead in base revision
        killed-both Killed in each revision
        killed-a    Killed in a
        killed-b    Killed in b
        unchanged   Alive in both a and b (possibly created in both)
        new-a       Created in a
        new-b       Created in b
        ghost-a     Killed in a, unborn in b
        ghost-b     Killed in b, unborn in a
        irrelevant  Not in either revision
        """
        raise NotImplementedError(VersionedFile.plan_merge)

    def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
                    b_marker=TextMerge.B_MARKER):
        return PlanWeaveMerge(plan, a_marker, b_marker).merge_lines()[0]
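
    # Editor's note: an illustrative sketch, not part of the original module.
    # plan_merge() feeds weave_merge(); `vf` is an assumed VersionedFile
    # implementation that provides plan_merge.
    #
    #   plan = vf.plan_merge('rev-a', 'rev-b')
    #   merged_lines = vf.weave_merge(plan)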


class RecordingVersionedFilesDecorator(object):
    """A minimal versioned files implementation that records calls made on it.

    Only enough methods have been added to support tests using it to date.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf):
        """Create a RecordingVersionedFilesDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        """
        self._backing_vf = backing_vf
        self.calls = []

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        self.calls.append(("add_lines", key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content))
        return self._backing_vf.add_lines(key, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

    def check(self):
        self._backing_vf.check()

    def get_parent_map(self, keys):
        self.calls.append(("get_parent_map", copy(keys)))
        return self._backing_vf.get_parent_map(keys)

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        self.calls.append(("get_record_stream", list(keys), sort_order,
            include_delta_closure))
        return self._backing_vf.get_record_stream(keys, sort_order,
            include_delta_closure)

    def get_sha1s(self, keys):
        self.calls.append(("get_sha1s", copy(keys)))
        return self._backing_vf.get_sha1s(keys)

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        self.calls.append(("iter_lines_added_or_present_in_keys", copy(keys)))
        return self._backing_vf.iter_lines_added_or_present_in_keys(keys, pb=pb)

    def keys(self):
        self.calls.append(("keys",))
        return self._backing_vf.keys()
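
# Editor's note: an illustrative sketch, not part of the original module. The
# decorator above is aimed at tests that assert which calls reached the
# backing store; `backing` is an assumed versioned files object.
#
#   recorder = RecordingVersionedFilesDecorator(backing)
#   recorder.get_parent_map([('rev-1',)])
#   recorder.calls == [("get_parent_map", [('rev-1',)])]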


class OrderingVersionedFilesDecorator(RecordingVersionedFilesDecorator):
    """A VF that records calls, and returns keys in specific order.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf, key_priority):
        """Create a RecordingVersionedFilesDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        :param key_priority: A dictionary defining what order keys should be
            returned from an 'unordered' get_record_stream request.
            Keys with lower priority are returned first, keys not present in
            the map get an implicit priority of 0, and are returned in
            lexicographical order.
        """
        RecordingVersionedFilesDecorator.__init__(self, backing_vf)
        self._key_priority = key_priority

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        self.calls.append(("get_record_stream", list(keys), sort_order,
            include_delta_closure))
        if sort_order == 'unordered':
            def sort_key(key):
                return (self._key_priority.get(key, 0), key)
            # Use a defined order by asking for the keys one-by-one from the
            # backing_vf
            for key in sorted(keys, key=sort_key):
                for record in self._backing_vf.get_record_stream([key],
                        'unordered', include_delta_closure):
                    yield record
        else:
            for record in self._backing_vf.get_record_stream(keys, sort_order,
                    include_delta_closure):
                yield record
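
# Editor's note: an illustrative sketch, not part of the original module.
# key_priority lets a test force the order of an 'unordered' stream; keys
# absent from the mapping default to priority 0 and sort lexicographically.
#
#   ordered = OrderingVersionedFilesDecorator(backing,
#       {('rev-b',): 1, ('rev-a',): 2})
#   # An 'unordered' get_record_stream request now yields unlisted keys
#   # first, then ('rev-b',), then ('rev-a',).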


class KeyMapper(object):
    """KeyMappers map between keys and underlying partitioned storage."""

    def map(self, key):
        """Map key to an underlying storage identifier.

        :param key: A key tuple e.g. ('file-id', 'revision-id').
        :return: An underlying storage identifier, specific to the partitioning
            mechanism.
        """
        raise NotImplementedError(self.map)

    def unmap(self, partition_id):
        """Map a partitioned storage id back to a key prefix.

        :param partition_id: The underlying partition id.
        :return: As much of a key (or prefix) as is derivable from the partition
            id.
        """
        raise NotImplementedError(self.unmap)


class ConstantMapper(KeyMapper):
    """A key mapper that maps to a constant result."""

    def __init__(self, result):
        """Create a ConstantMapper which will return result for all maps."""
        self._result = result

    def map(self, key):
        """See KeyMapper.map()."""
        return self._result


class URLEscapeMapper(KeyMapper):
    """Base class for use with transport backed storage.

    This provides a map and unmap wrapper that respectively url escape and
    unescape their outputs and inputs.
    """

    def map(self, key):
        """See KeyMapper.map()."""
        return urllib.quote(self._map(key))

    def unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return self._unmap(urllib.unquote(partition_id))


class PrefixMapper(URLEscapeMapper):
    """A key mapper that extracts the first component of a key.

    This mapper is for use with a transport based backend.
    """

    def _map(self, key):
        """See KeyMapper.map()."""
        return key[0]

    def _unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return (partition_id,)


class HashPrefixMapper(URLEscapeMapper):
    """A key mapper that combines the first component of a key with a hash.

    This mapper is for use with a transport based backend.
    """

    def _map(self, key):
        """See KeyMapper.map()."""
        prefix = self._escape(key[0])
        return "%02x/%s" % (adler32(prefix) & 0xff, prefix)

    def _escape(self, prefix):
        """No escaping needed here."""
        return prefix

    def _unmap(self, partition_id):
        """See KeyMapper.unmap()."""
        return (self._unescape(osutils.basename(partition_id)),)

    def _unescape(self, basename):
        """No unescaping needed for HashPrefixMapper."""
        return basename


class HashEscapedPrefixMapper(HashPrefixMapper):
    """Combines the escaped first component of a key with a hash.

    This mapper is for use with a transport based backend.
    """

    _safe = "abcdefghijklmnopqrstuvwxyz0123456789-_@,."

    def _escape(self, prefix):
        """Turn a key element into a filesystem safe string.

        This is similar to a plain urllib.quote, except
        it uses specific safe characters, so that it doesn't
        have to translate a lot of valid file ids.
        """
        # @ does not get escaped. This is because it is a valid
        # filesystem character we use all the time, and it looks
        # a lot better than seeing %40 all the time.
        r = [((c in self._safe) and c or ('%%%02x' % ord(c)))
             for c in prefix]
        return ''.join(r)

    def _unescape(self, basename):
        """Escaped names are easily unescaped by urlutils."""
        return urllib.unquote(basename)
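
# Editor's note: an illustrative sketch, not part of the original module,
# using an example two-element key; the hash bucket produced by
# HashPrefixMapper depends on adler32.
#
#   PrefixMapper().map(('file-id', 'rev-1'))      # -> 'file-id'
#   PrefixMapper().unmap('file-id')               # -> ('file-id',)
#   HashPrefixMapper().map(('file-id', 'rev-1'))  # -> '<xx>/file-id'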
757  | 
def make_versioned_files_factory(versioned_file_factory, mapper):  | 
|
758  | 
"""Create a ThunkedVersionedFiles factory.  | 
|
759  | 
||
760  | 
    This will create a callable which when called creates a
 | 
|
761  | 
    ThunkedVersionedFiles on a transport, using mapper to access individual
 | 
|
762  | 
    versioned files, and versioned_file_factory to create each individual file.
 | 
|
763  | 
    """
 | 
|
764  | 
def factory(transport):  | 
|
765  | 
return ThunkedVersionedFiles(transport, versioned_file_factory, mapper,  | 
|
766  | 
lambda:True)  | 
|
767  | 
return factory  | 
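

# Editorial example (not part of the original bzrlib API): a sketch of wiring
# the factory together.  weave.WeaveFile is assumed here to accept the
# (name, transport, create=..., get_scope=...) call made by
# ThunkedVersionedFiles._get_vf below; any VersionedFile class with a
# compatible signature would do.
def _thunked_files_factory_example(transport):
    from bzrlib import weave
    factory = make_versioned_files_factory(weave.WeaveFile, PrefixMapper())
    # The returned callable only needs a transport; keys such as
    # ('file-id', 'rev-1') are then routed to per-prefix weave files.
    return factory(transport)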


class VersionedFiles(object):
    """Storage for many versioned files.

    This object allows a single keyspace for accessing the history graph and
    contents of named bytestrings.

    Currently no implementation allows the graph of different key prefixes to
    intersect, but the API does allow such implementations in the future.

    The keyspace is expressed via simple tuples. Any instance of VersionedFiles
    may have a different length key-size, but that size will be constant for
    all texts added to or retrieved from it. For instance, bzrlib uses
    instances with a key-size of 2 for storing user files in a repository, with
    the first element the fileid, and the second the version of that file.

    The use of tuples allows a single code base to support several different
    uses with only the mapping logic changing from instance to instance.
    """

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a text to the store.

        :param key: The key tuple of the text to add.
        :param parents: The parents key tuples of the text to add.
        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminating \n. If the lines list does not meet this constraint the
            add routine may error or may succeed - but you will be unable to
            read the data back accurately. (Checking the lines have been split
            correctly is expensive and extremely unlikely to catch bugs so it
            is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations.  VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent.  The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
                 representation of the inserted version which can be provided
                 back to future add_lines calls in the parent_texts dictionary.
        """
        raise NotImplementedError(self.add_lines)
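
    # Editorial example (not part of the original docstring): with the
    # key-size 2 convention described in the class docstring, a typical call
    # on a concrete implementation would look like:
    #
    #   sha1, num_bytes, opaque = vf.add_lines(
    #       ('file-id', 'rev-2'), [('file-id', 'rev-1')],
    #       ['first line\n', 'last line without newline'])
    #
    # The opaque value can be fed back via parent_texts on later calls to
    # help delta generation.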

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        # It seems likely that adding all the present parents as fulltexts can
        # easily exhaust memory.
        chunks_to_lines = osutils.chunks_to_lines
        for record in self.get_record_stream(needed_parents, 'unordered',
            True):
            if record.storage_kind == 'absent':
                continue
            mpvf.add_version(chunks_to_lines(record.get_bytes_as('chunked')),
                record.key, [])
        for (key, parent_keys, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_keys) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_keys[0]).num_lines()))
            else:
                left_matching_blocks = None
            version_sha1, _, version_text = self.add_lines(key,
                parent_keys, lines, vf_parents,
                left_matching_blocks=left_matching_blocks)
            if version_sha1 != expected_sha1:
                raise errors.VersionedFileInvalidChecksum(version)
            vf_parents[key] = version_text
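
    # Editorial example (not part of the original docstring): each record
    # handed to add_mpdiffs is a (key, parent_keys, expected_sha1, mpdiff)
    # tuple, e.g.
    #
    #   records = [(('file-id', 'rev-2'), [('file-id', 'rev-1')],
    #               expected_sha1, multiparent.MultiParent.from_lines(
    #                   new_lines, [old_lines]))]
    #   vf.add_mpdiffs(records)
    #
    # where expected_sha1 must match the sha1 of the reconstructed text.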

    def annotate(self, key):
        """Return a list of (version-key, line) tuples for the text of key.

        :raise RevisionNotPresent: If the key is not present.
        """
        raise NotImplementedError(self.annotate)

    def check(self, progress_bar=None):
        """Check this object for integrity."""
        raise NotImplementedError(self.check)

    @staticmethod
    def check_not_reserved_id(version_id):
        revision.check_not_reserved_id(version_id)

    def _check_lines_not_unicode(self, lines):
        """Check that lines being added to a versioned file are not unicode."""
        for line in lines:
            if line.__class__ is not str:
                raise errors.BzrBadParameterUnicode("lines")

    def _check_lines_are_lines(self, lines):
        """Check that the lines really are full lines without inline EOL."""
        for line in lines:
            if '\n' in line[:-1]:
                raise errors.BzrBadParameterContainsNewline("lines")

    def get_parent_map(self, keys):
        """Get a map of the parents of keys.

        :param keys: The keys to look up parents for.
        :return: A mapping from keys to parents. Absent keys are absent from
            the mapping.
        """
        raise NotImplementedError(self.get_parent_map)

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """Get a stream of records for keys.

        :param keys: The keys to include.
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the opaque data).
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        raise NotImplementedError(self.get_record_stream)
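
    # Editorial example (not part of the original docstring): the consumption
    # pattern used elsewhere in this module (add_mpdiffs, make_mpdiffs) is:
    #
    #   for record in vf.get_record_stream(keys, 'topological', True):
    #       if record.storage_kind == 'absent':
    #           continue
    #       lines = osutils.chunks_to_lines(record.get_bytes_as('chunked'))
    #
    # i.e. callers must handle absent records and ask each record for a
    # storage kind they know how to decode.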

    def get_sha1s(self, keys):
        """Get the sha1's of the texts for the given keys.

        :param keys: The names of the keys to lookup
        :return: a dict from key to sha1 digest. Keys of texts which are not
            present in the store are not present in the returned
            dictionary.
        """
        raise NotImplementedError(self.get_sha1s)

    has_key = index._has_key_from_parent_map

    def insert_record_stream(self, stream):
        """Insert a record stream into this container.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        raise NotImplementedError

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """Iterate over the lines in the versioned files from keys.

        This may return lines from other keys. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES:
         * Lines are normalised by the underlying store: they will all have \n
           terminators.
         * Lines are returned in arbitrary order.

        :return: An iterator over (line, key).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_keys)

    def keys(self):
        """Return an iterable of the keys for all the contained texts."""
        raise NotImplementedError(self.keys)

    def make_mpdiffs(self, keys):
        """Create multiparent diffs for specified keys."""
        keys_order = tuple(keys)
        keys = frozenset(keys)
        knit_keys = set(keys)
        parent_map = self.get_parent_map(keys)
        for parent_keys in parent_map.itervalues():
            if parent_keys:
                knit_keys.update(parent_keys)
        missing_keys = keys - set(parent_map)
        if missing_keys:
            raise errors.RevisionNotPresent(list(missing_keys)[0], self)
        # We need to filter out ghosts, because we can't diff against them.
        maybe_ghosts = knit_keys - keys
        ghosts = maybe_ghosts - set(self.get_parent_map(maybe_ghosts))
        knit_keys.difference_update(ghosts)
        lines = {}
        chunks_to_lines = osutils.chunks_to_lines
        for record in self.get_record_stream(knit_keys, 'topological', True):
            lines[record.key] = chunks_to_lines(record.get_bytes_as('chunked'))
            # line_block_dict = {}
            # for parent, blocks in record.extract_line_blocks():
            #   line_blocks[parent] = blocks
            # line_blocks[record.key] = line_block_dict
        diffs = []
        for key in keys_order:
            target = lines[key]
            parents = parent_map[key] or []
            # Note that filtering knit_keys can lead to a parent difference
            # between the creation and the application of the mpdiff.
            parent_lines = [lines[p] for p in parents if p in knit_keys]
            if len(parent_lines) > 0:
                left_parent_blocks = self._extract_blocks(key, parent_lines[0],
                    target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target,
                parent_lines, left_parent_blocks))
        return diffs

    missing_keys = index._missing_keys_from_parent_map

    def _extract_blocks(self, version_id, source, target):
        return None


class ThunkedVersionedFiles(VersionedFiles):
    """Storage for many versioned files thunked onto a 'VersionedFile' class.

    This object allows a single keyspace for accessing the history graph and
    contents of named bytestrings.

    Currently no implementation allows the graph of different key prefixes to
    intersect, but the API does allow such implementations in the future.
    """

    def __init__(self, transport, file_factory, mapper, is_locked):
        """Create a ThunkedVersionedFiles."""
        self._transport = transport
        self._file_factory = file_factory
        self._mapper = mapper
        self._is_locked = is_locked

    def add_lines(self, key, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """See VersionedFiles.add_lines()."""
        path = self._mapper.map(key)
        version_id = key[-1]
        parents = [parent[-1] for parent in parents]
        vf = self._get_vf(path)
        try:
            try:
                return vf.add_lines_with_ghosts(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)
            except NotImplementedError:
                return vf.add_lines(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)
        except errors.NoSuchFile:
            # parent directory may be missing, try again.
            self._transport.mkdir(osutils.dirname(path))
            try:
                return vf.add_lines_with_ghosts(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)
            except NotImplementedError:
                return vf.add_lines(version_id, parents, lines,
                    parent_texts=parent_texts,
                    left_matching_blocks=left_matching_blocks,
                    nostore_sha=nostore_sha, random_id=random_id,
                    check_content=check_content)

    def annotate(self, key):
        """Return a list of (version-key, line) tuples for the text of key.

        :raise RevisionNotPresent: If the key is not present.
        """
        prefix = key[:-1]
        path = self._mapper.map(prefix)
        vf = self._get_vf(path)
        origins = vf.annotate(key[-1])
        result = []
        for origin, line in origins:
            result.append((prefix + (origin,), line))
        return result

    def check(self, progress_bar=None):
        """See VersionedFiles.check()."""
        for prefix, vf in self._iter_all_components():
            vf.check()

    def get_parent_map(self, keys):
        """Get a map of the parents of keys.

        :param keys: The keys to look up parents for.
        :return: A mapping from keys to parents. Absent keys are absent from
            the mapping.
        """
        prefixes = self._partition_keys(keys)
        result = {}
        for prefix, suffixes in prefixes.items():
            path = self._mapper.map(prefix)
            vf = self._get_vf(path)
            parent_map = vf.get_parent_map(suffixes)
            for key, parents in parent_map.items():
                result[prefix + (key,)] = tuple(
                    prefix + (parent,) for parent in parents)
        return result

    def _get_vf(self, path):
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        return self._file_factory(path, self._transport, create=True,
            get_scope=lambda: None)

    def _partition_keys(self, keys):
        """Turn keys into a dict of prefix:suffix_list."""
        result = {}
        for key in keys:
            prefix_keys = result.setdefault(key[:-1], [])
            prefix_keys.append(key[-1])
        return result
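
    # Editorial example (not part of the original docstring): for keys
    # [('f1', 'r1'), ('f1', 'r2'), ('f2', 'r1')] the partition produced by
    # _partition_keys is {('f1',): ['r1', 'r2'], ('f2',): ['r1']}, and each
    # prefix is then mapped to its own underlying VersionedFile.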

    def _get_all_prefixes(self):
        # Identify all key prefixes.
        # XXX: A bit hacky, needs polish.
        if type(self._mapper) == ConstantMapper:
            paths = [self._mapper.map(())]
            prefixes = [()]
        else:
            relpaths = set()
            for quoted_relpath in self._transport.iter_files_recursive():
                path, ext = os.path.splitext(quoted_relpath)
                relpaths.add(path)
            paths = list(relpaths)
            prefixes = [self._mapper.unmap(path) for path in paths]
        return zip(paths, prefixes)

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream()."""
        # Ordering will be taken care of by each partitioned store; group keys
        # by partition.
        keys = sorted(keys)
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            suffixes = [(suffix,) for suffix in suffixes]
            for record in vf.get_record_stream(suffixes, ordering,
                include_delta_closure):
                if record.parents is not None:
                    record.parents = tuple(
                        prefix + parent for parent in record.parents)
                record.key = prefix + record.key
                yield record

    def _iter_keys_vf(self, keys):
        prefixes = self._partition_keys(keys)
        for prefix, suffixes in prefixes.items():
            path = self._mapper.map(prefix)
            vf = self._get_vf(path)
            yield prefix, suffixes, vf

    def get_sha1s(self, keys):
        """See VersionedFiles.get_sha1s()."""
        sha1s = {}
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            vf_sha1s = vf.get_sha1s(suffixes)
            for suffix, sha1 in vf_sha1s.iteritems():
                sha1s[prefix + (suffix,)] = sha1
        return sha1s

    def insert_record_stream(self, stream):
        """Insert a record stream into this container.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        for record in stream:
            prefix = record.key[:-1]
            key = record.key[-1:]
            if record.parents is not None:
                parents = [parent[-1:] for parent in record.parents]
            else:
                parents = None
            thunk_record = AdapterFactory(key, parents, record)
            path = self._mapper.map(prefix)
            # Note that this parses the file many times; we can do better but
            # as this only impacts weaves in terms of performance, it is
            # tolerable.
            vf = self._get_vf(path)
            vf.insert_record_stream([thunk_record])

    def iter_lines_added_or_present_in_keys(self, keys, pb=None):
        """Iterate over the lines in the versioned files from keys.

        This may return lines from other keys. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES:
         * Lines are normalised by the underlying store: they will all have \n
           terminators.
         * Lines are returned in arbitrary order.

        :return: An iterator over (line, key).
        """
        for prefix, suffixes, vf in self._iter_keys_vf(keys):
            for line, version in vf.iter_lines_added_or_present_in_versions(suffixes):
                yield line, prefix + (version,)

    def _iter_all_components(self):
        for path, prefix in self._get_all_prefixes():
            yield prefix, self._get_vf(path)

    def keys(self):
        """See VersionedFiles.keys()."""
        result = set()
        for prefix, vf in self._iter_all_components():
            for suffix in vf.versions():
                result.add(prefix + (suffix,))
        return result


class _PlanMergeVersionedFile(VersionedFiles):
    """A VersionedFile for uncommitted and committed texts.

    It is intended to allow merges to be planned with working tree texts.
    It implements only the small part of the VersionedFiles interface used by
    PlanMerge.  It falls back to multiple versionedfiles for data not stored in
    _PlanMergeVersionedFile itself.

    :ivar fallback_versionedfiles: a list of VersionedFiles objects that can be
        queried for missing texts.
    """

    def __init__(self, file_id):
        """Create a _PlanMergeVersionedFile.

        :param file_id: Used with _PlanMerge code which is not yet fully
            tuple-keyspace aware.
        """
        self._file_id = file_id
        # fallback locations
        self.fallback_versionedfiles = []
        # Parents for locally held keys.
        self._parents = {}
        # line data for locally held keys.
        self._lines = {}
        # key lookup providers
        self._providers = [DictParentsProvider(self._parents)]

    def plan_merge(self, ver_a, ver_b, base=None):
        """See VersionedFile.plan_merge"""
        from bzrlib.merge import _PlanMerge
        if base is None:
            return _PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge()
        old_plan = list(_PlanMerge(ver_a, base, self, (self._file_id,)).plan_merge())
        new_plan = list(_PlanMerge(ver_a, ver_b, self, (self._file_id,)).plan_merge())
        return _PlanMerge._subtract_plans(old_plan, new_plan)

    def plan_lca_merge(self, ver_a, ver_b, base=None):
        from bzrlib.merge import _PlanLCAMerge
        graph = Graph(self)
        new_plan = _PlanLCAMerge(ver_a, ver_b, self, (self._file_id,), graph).plan_merge()
        if base is None:
            return new_plan
        old_plan = _PlanLCAMerge(ver_a, base, self, (self._file_id,), graph).plan_merge()
        return _PlanLCAMerge._subtract_plans(list(old_plan), list(new_plan))

    def add_lines(self, key, parents, lines):
        """See VersionedFiles.add_lines

        Lines are added locally, not to fallback versionedfiles.  Also, ghosts
        are permitted.  Only reserved ids are permitted.
        """
        if type(key) is not tuple:
            raise TypeError(key)
        if not revision.is_reserved_id(key[-1]):
            raise ValueError('Only reserved ids may be used')
        if parents is None:
            raise ValueError('Parents may not be None')
        if lines is None:
            raise ValueError('Lines may not be None')
        self._parents[key] = tuple(parents)
        self._lines[key] = lines

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream."""
        pending = set(keys)
        for key in keys:
            if key in self._lines:
                lines = self._lines[key]
                parents = self._parents[key]
                pending.remove(key)
                yield ChunkedContentFactory(key, parents, None, lines)
        for versionedfile in self.fallback_versionedfiles:
            for record in versionedfile.get_record_stream(
                    pending, 'unordered', True):
                if record.storage_kind == 'absent':
                    continue
                else:
                    pending.remove(record.key)
                    yield record
            if not pending:
                return
        # report absent entries
        for key in pending:
            yield AbsentContentFactory(key)

    def get_parent_map(self, keys):
        """See VersionedFiles.get_parent_map"""
        # We create a new provider because a fallback may have been added.
        # If we make fallbacks private we can update a stack list and avoid
        # object creation thrashing.
        keys = set(keys)
        result = {}
        if revision.NULL_REVISION in keys:
            keys.remove(revision.NULL_REVISION)
            result[revision.NULL_REVISION] = ()
        self._providers = self._providers[:1] + self.fallback_versionedfiles
        result.update(
            _StackedParentsProvider(self._providers).get_parent_map(keys))
        for key, parents in result.iteritems():
            if parents == ():
                result[key] = (revision.NULL_REVISION,)
        return result
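

# Editorial example (not part of the original bzrlib API): a sketch of how
# _PlanMergeVersionedFile is populated for merge planning.  'current:' is
# assumed here to be a reserved revision id (revision.is_reserved_id accepts
# ids ending in ':'); real callers use the reserved ids chosen by bzrlib.merge,
# and repository_texts stands in for a real fallback VersionedFiles such as
# repository.texts.
def _plan_merge_vf_example(repository_texts):
    vf = _PlanMergeVersionedFile('file-id')
    # Committed texts come from fallbacks; uncommitted (working tree) texts
    # are added locally under reserved ids.
    vf.fallback_versionedfiles.append(repository_texts)
    vf.add_lines(('file-id', 'current:'), [], ['working tree text\n'])
    return vf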


class PlanWeaveMerge(TextMerge):
    """Weave merge that takes a plan as its input.

    This exists so that VersionedFile.plan_merge is implementable.
    Most callers will want to use WeaveMerge instead.
    """

    def __init__(self, plan, a_marker=TextMerge.A_MARKER,
                 b_marker=TextMerge.B_MARKER):
        TextMerge.__init__(self, a_marker, b_marker)
        self.plan = plan

    def _merge_struct(self):
        lines_a = []
        lines_b = []
        ch_a = ch_b = False

        def outstanding_struct():
            if not lines_a and not lines_b:
                return
            elif ch_a and not ch_b:
                # one-sided change:
                yield (lines_a,)
            elif ch_b and not ch_a:
                yield (lines_b,)
            elif lines_a == lines_b:
                yield (lines_a,)
            else:
                yield (lines_a, lines_b)

        # We previously considered either 'unchanged' or 'killed-both' lines
        # to be possible places to resynchronize.  However, assuming agreement
        # on killed-both lines may be too aggressive. -- mbp 20060324
        for state, line in self.plan:
            if state == 'unchanged':
                # resync and flush queued conflicts changes if any
                for struct in outstanding_struct():
                    yield struct
                lines_a = []
                lines_b = []
                ch_a = ch_b = False

            if state == 'unchanged':
                if line:
                    yield ([line],)
            elif state == 'killed-a':
                ch_a = True
                lines_b.append(line)
            elif state == 'killed-b':
                ch_b = True
                lines_a.append(line)
            elif state == 'new-a':
                ch_a = True
                lines_a.append(line)
            elif state == 'new-b':
                ch_b = True
                lines_b.append(line)
            elif state == 'conflicted-a':
                ch_b = ch_a = True
                lines_a.append(line)
            elif state == 'conflicted-b':
                ch_b = ch_a = True
                lines_b.append(line)
            else:
                if state not in ('irrelevant', 'ghost-a', 'ghost-b',
                        'killed-base', 'killed-both'):
                    raise AssertionError(state)
        for struct in outstanding_struct():
            yield struct
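

# Editorial example (not part of the original bzrlib API): a sketch of the
# (state, line) plan format consumed by PlanWeaveMerge, using only states
# handled by _merge_struct above.
def _plan_weave_merge_example():
    plan = [
        ('unchanged', 'common\n'),   # emitted as-is and used to resync
        ('new-a', 'only in a\n'),    # queued on the a side
        ('new-b', 'only in b\n'),    # queued on the b side
    ]
    # Both sides changed, so the trailing struct is a two-tuple conflict
    # region: [(['common\n'],), (['only in a\n'], ['only in b\n'])]
    return list(PlanWeaveMerge(plan)._merge_struct())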


class WeaveMerge(PlanWeaveMerge):
    """Weave merge that takes a VersionedFile and two versions as its input."""

    def __init__(self, versionedfile, ver_a, ver_b,
        a_marker=PlanWeaveMerge.A_MARKER, b_marker=PlanWeaveMerge.B_MARKER):
        plan = versionedfile.plan_merge(ver_a, ver_b)
        PlanWeaveMerge.__init__(self, plan, a_marker, b_marker)


class VirtualVersionedFiles(VersionedFiles):
    """Dummy implementation for VersionedFiles that uses other functions for
    obtaining fulltexts and parent maps.

    This is always on the bottom of the stack and uses string keys
    (rather than tuples) internally.
    """

    def __init__(self, get_parent_map, get_lines):
        """Create a VirtualVersionedFiles.

        :param get_parent_map: Same signature as Repository.get_parent_map.
        :param get_lines: Should return lines for specified key or None if
                          not available.
        """
        super(VirtualVersionedFiles, self).__init__()
        self._get_parent_map = get_parent_map
        self._get_lines = get_lines

    def check(self, progressbar=None):
        """See VersionedFiles.check.

        :note: Always returns True for VirtualVersionedFiles.
        """
        return True

    def add_mpdiffs(self, records):
        """See VersionedFiles.add_mpdiffs.

        :note: Not implemented for VirtualVersionedFiles.
        """
        raise NotImplementedError(self.add_mpdiffs)

    def get_parent_map(self, keys):
        """See VersionedFiles.get_parent_map."""
        return dict([((k,), tuple([(p,) for p in v]))
            for k, v in self._get_parent_map([k for (k,) in keys]).iteritems()])

    def get_sha1s(self, keys):
        """See VersionedFiles.get_sha1s."""
        ret = {}
        for (k,) in keys:
            lines = self._get_lines(k)
            if lines is not None:
                if not isinstance(lines, list):
                    raise AssertionError
                ret[(k,)] = osutils.sha_strings(lines)
        return ret

    def get_record_stream(self, keys, ordering, include_delta_closure):
        """See VersionedFiles.get_record_stream."""
        for (k,) in list(keys):
            lines = self._get_lines(k)
            if lines is not None:
                if not isinstance(lines, list):
                    raise AssertionError
                yield ChunkedContentFactory((k,), None,
                    sha1=osutils.sha_strings(lines),
                    chunks=lines)
            else:
                yield AbsentContentFactory((k,))
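

# Editorial example (not part of the original bzrlib API): a minimal sketch of
# backing VirtualVersionedFiles with plain dictionaries, assuming string
# revision ids as the underlying keys.
def _virtual_vf_example():
    texts = {'rev-1': ['hello\n'], 'rev-2': ['hello\n', 'world\n']}
    parents = {'rev-1': (), 'rev-2': ('rev-1',)}
    vvf = VirtualVersionedFiles(
        lambda keys: dict((k, parents[k]) for k in keys if k in parents),
        texts.get)
    # Keys are tuple-wrapped on the way in and out, matching the
    # VersionedFiles keyspace convention.
    return vvf.get_parent_map([('rev-2',)]), vvf.get_sha1s([('rev-1',)])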