bzr branch
http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
| 
2052.3.2
by John Arbash Meinel
 Change Copyright .. by Canonical to Copyright ... Canonical  | 
1  | 
# Copyright (C) 2005 Canonical Ltd
 | 
| 
1563.2.1
by Robert Collins
 Merge in a variation of the versionedfile api from versioned-file.  | 
2  | 
#
 | 
3  | 
# Authors:
 | 
|
4  | 
#   Johan Rydberg <jrydberg@gnu.org>
 | 
|
5  | 
#
 | 
|
6  | 
# This program is free software; you can redistribute it and/or modify
 | 
|
7  | 
# it under the terms of the GNU General Public License as published by
 | 
|
8  | 
# the Free Software Foundation; either version 2 of the License, or
 | 
|
9  | 
# (at your option) any later version.
 | 
|
| 
1887.1.1
by Adeodato Simó
 Do not separate paragraphs in the copyright statement with blank lines,  | 
10  | 
#
 | 
| 
1563.2.1
by Robert Collins
 Merge in a variation of the versionedfile api from versioned-file.  | 
11  | 
# This program is distributed in the hope that it will be useful,
 | 
12  | 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 | 
|
13  | 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 | 
|
14  | 
# GNU General Public License for more details.
 | 
|
| 
1887.1.1
by Adeodato Simó
 Do not separate paragraphs in the copyright statement with blank lines,  | 
15  | 
#
 | 
| 
1563.2.1
by Robert Collins
 Merge in a variation of the versionedfile api from versioned-file.  | 
16  | 
# You should have received a copy of the GNU General Public License
 | 
17  | 
# along with this program; if not, write to the Free Software
 | 
|
18  | 
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 | 
|
19  | 
||
20  | 
||
| 
1704.2.15
by Martin Pool
 Remove TODO about knit testing printed from test suite  | 
21  | 
# TODO: might be nice to create a versionedfile with some type of corruption
 | 
22  | 
# considered typical and check that it can be detected/corrected.
 | 
|
23  | 
||
| 
3350.3.16
by Robert Collins
 Add test that out of order insertion fails with a clean error/does not fail.  | 
24  | 
from itertools import chain  | 
| 
1664.2.9
by Aaron Bentley
 Ported weave merge test to versionedfile  | 
25  | 
from StringIO import StringIO  | 
26  | 
||
| 
1563.2.6
by Robert Collins
 Start check tests for knits (pending), and remove dead code.  | 
27  | 
import bzrlib  | 
| 
2039.1.1
by Aaron Bentley
 Clean up progress properly when interrupted during fetch (#54000)  | 
28  | 
from bzrlib import (  | 
29  | 
errors,  | 
|
| 
2309.4.7
by John Arbash Meinel
 Update VersionedFile tests to ensure that they can take Unicode,  | 
30  | 
osutils,  | 
| 
2039.1.1
by Aaron Bentley
 Clean up progress properly when interrupted during fetch (#54000)  | 
31  | 
progress,  | 
32  | 
    )
 | 
|
| 
1563.2.11
by Robert Collins
 Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.  | 
33  | 
from bzrlib.errors import (  | 
| 
3316.2.3
by Robert Collins
 Remove manual notification of transaction finishing on versioned files.  | 
34  | 
RevisionNotPresent,  | 
| 
1563.2.11
by Robert Collins
 Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.  | 
35  | 
RevisionAlreadyPresent,  | 
36  | 
                           WeaveParentMismatch
 | 
|
37  | 
                           )
 | 
|
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
38  | 
from bzrlib import knit as _mod_knit  | 
| 
2770.1.1
by Aaron Bentley
 Initial implmentation of plain knit annotation  | 
39  | 
from bzrlib.knit import (  | 
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
40  | 
cleanup_pack_knit,  | 
41  | 
make_file_factory,  | 
|
42  | 
make_pack_factory,  | 
|
| 
2770.1.1
by Aaron Bentley
 Initial implmentation of plain knit annotation  | 
43  | 
KnitAnnotateFactory,  | 
| 
2770.1.10
by Aaron Bentley
 Merge bzr.dev  | 
44  | 
KnitPlainFactory,  | 
| 
2770.1.1
by Aaron Bentley
 Initial implmentation of plain knit annotation  | 
45  | 
    )
 | 
| 
3350.3.14
by Robert Collins
 Deprecate VersionedFile.join.  | 
46  | 
from bzrlib.symbol_versioning import one_four, one_five  | 
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
47  | 
from bzrlib.tests import (  | 
| 
3518.1.1
by Jelmer Vernooij
 Add VirtualVersionedFiles class.  | 
48  | 
TestCase,  | 
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
49  | 
TestCaseWithMemoryTransport,  | 
50  | 
TestScenarioApplier,  | 
|
51  | 
TestSkipped,  | 
|
52  | 
condition_isinstance,  | 
|
53  | 
split_suite_by_condition,  | 
|
54  | 
iter_suite_tests,  | 
|
55  | 
    )
 | 
|
| 
3102.1.1
by Vincent Ladeuil
 Rename bzrlib/test/HTTPTestUtils.py to bzrlib/tests/http_utils.py and fix  | 
56  | 
from bzrlib.tests.http_utils import TestCaseWithWebserver  | 
| 
1563.2.1
by Robert Collins
 Merge in a variation of the versionedfile api from versioned-file.  | 
57  | 
from bzrlib.trace import mutter  | 
| 
1563.2.16
by Robert Collins
 Change WeaveStore into VersionedFileStore and make its versoined file class parameterisable.  | 
58  | 
from bzrlib.transport import get_transport  | 
| 
1563.2.13
by Robert Collins
 InterVersionedFile implemented.  | 
59  | 
from bzrlib.transport.memory import MemoryTransport  | 
| 
1684.3.1
by Robert Collins
 Fix versioned file joins with empty targets.  | 
60  | 
from bzrlib.tsort import topo_sort  | 
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
61  | 
from bzrlib.tuned_gzip import GzipFile  | 
| 
1563.2.12
by Robert Collins
 Checkpointing: created InterObject to factor out common inter object worker code, added InterVersionedFile and tests to allow making join work between any versionedfile.  | 
62  | 
import bzrlib.versionedfile as versionedfile  | 
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
63  | 
from bzrlib.versionedfile import (  | 
64  | 
ConstantMapper,  | 
|
65  | 
HashEscapedPrefixMapper,  | 
|
66  | 
PrefixMapper,  | 
|
| 
3518.1.1
by Jelmer Vernooij
 Add VirtualVersionedFiles class.  | 
67  | 
VirtualVersionedFiles,  | 
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
68  | 
make_versioned_files_factory,  | 
69  | 
    )
 | 
|
| 
1563.2.9
by Robert Collins
 Update versionedfile api tests to ensure that data is available after every operation.  | 
70  | 
from bzrlib.weave import WeaveFile  | 
| 
1664.2.9
by Aaron Bentley
 Ported weave merge test to versionedfile  | 
71  | 
from bzrlib.weavefile import read_weave, write_weave  | 
| 
1563.2.1
by Robert Collins
 Merge in a variation of the versionedfile api from versioned-file.  | 
72  | 
|
73  | 
||
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
74  | 
def load_tests(standard_tests, module, loader):  | 
75  | 
"""Parameterize VersionedFiles tests for different implementations."""  | 
|
76  | 
to_adapt, result = split_suite_by_condition(  | 
|
77  | 
standard_tests, condition_isinstance(TestVersionedFiles))  | 
|
78  | 
len_one_adapter = TestScenarioApplier()  | 
|
79  | 
len_two_adapter = TestScenarioApplier()  | 
|
80  | 
    # We want to be sure of behaviour for:
 | 
|
81  | 
    # weaves prefix layout (weave texts)
 | 
|
82  | 
    # individually named weaves (weave inventories)
 | 
|
83  | 
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
 | 
|
84  | 
    #                   as it is the most complex mapper.
 | 
|
85  | 
    # individually named knits
 | 
|
86  | 
    # individual no-graph knits in packs (signatures)
 | 
|
87  | 
    # individual graph knits in packs (inventories)
 | 
|
88  | 
    # individual graph nocompression knits in packs (revisions)
 | 
|
89  | 
    # plain text knits in packs (texts)
 | 
|
90  | 
len_one_adapter.scenarios = [  | 
|
91  | 
('weave-named', {  | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
92  | 
'cleanup':None,  | 
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
93  | 
'factory':make_versioned_files_factory(WeaveFile,  | 
94  | 
ConstantMapper('inventory')),  | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
95  | 
'graph':True,  | 
96  | 
'key_length':1,  | 
|
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
97  | 
            }),
 | 
98  | 
('named-knit', {  | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
99  | 
'cleanup':None,  | 
100  | 
'factory':make_file_factory(False, ConstantMapper('revisions')),  | 
|
101  | 
'graph':True,  | 
|
102  | 
'key_length':1,  | 
|
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
103  | 
            }),
 | 
104  | 
('named-nograph-knit-pack', {  | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
105  | 
'cleanup':cleanup_pack_knit,  | 
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
106  | 
'factory':make_pack_factory(False, False, 1),  | 
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
107  | 
'graph':False,  | 
108  | 
'key_length':1,  | 
|
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
109  | 
            }),
 | 
110  | 
('named-graph-knit-pack', {  | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
111  | 
'cleanup':cleanup_pack_knit,  | 
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
112  | 
'factory':make_pack_factory(True, True, 1),  | 
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
113  | 
'graph':True,  | 
114  | 
'key_length':1,  | 
|
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
115  | 
            }),
 | 
116  | 
('named-graph-nodelta-knit-pack', {  | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
117  | 
'cleanup':cleanup_pack_knit,  | 
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
118  | 
'factory':make_pack_factory(True, False, 1),  | 
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
119  | 
'graph':True,  | 
120  | 
'key_length':1,  | 
|
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
121  | 
            }),
 | 
122  | 
        ]
 | 
|
123  | 
len_two_adapter.scenarios = [  | 
|
124  | 
('weave-prefix', {  | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
125  | 
'cleanup':None,  | 
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
126  | 
'factory':make_versioned_files_factory(WeaveFile,  | 
127  | 
PrefixMapper()),  | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
128  | 
'graph':True,  | 
129  | 
'key_length':2,  | 
|
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
130  | 
            }),
 | 
131  | 
('annotated-knit-escape', {  | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
132  | 
'cleanup':None,  | 
133  | 
'factory':make_file_factory(True, HashEscapedPrefixMapper()),  | 
|
134  | 
'graph':True,  | 
|
135  | 
'key_length':2,  | 
|
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
136  | 
            }),
 | 
137  | 
('plain-knit-pack', {  | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
138  | 
'cleanup':cleanup_pack_knit,  | 
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
139  | 
'factory':make_pack_factory(True, True, 2),  | 
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
140  | 
'graph':True,  | 
141  | 
'key_length':2,  | 
|
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
142  | 
            }),
 | 
143  | 
        ]
 | 
|
144  | 
for test in iter_suite_tests(to_adapt):  | 
|
145  | 
result.addTests(len_one_adapter.adapt(test))  | 
|
146  | 
result.addTests(len_two_adapter.adapt(test))  | 
|
147  | 
return result  | 
|
148  | 
||
149  | 
||
| 
3350.3.11
by Robert Collins
 Test inserting a stream that overlaps the current content of a knit does not error.  | 
150  | 
def get_diamond_vf(f, trailing_eol=True, left_only=False):  | 
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
151  | 
"""Get a diamond graph to exercise deltas and merges.  | 
152  | 
    
 | 
|
153  | 
    :param trailing_eol: If True end the last line with \n.
 | 
|
154  | 
    """
 | 
|
155  | 
parents = {  | 
|
156  | 
'origin': (),  | 
|
157  | 
'base': (('origin',),),  | 
|
158  | 
'left': (('base',),),  | 
|
159  | 
'right': (('base',),),  | 
|
160  | 
'merged': (('left',), ('right',)),  | 
|
161  | 
        }
 | 
|
162  | 
    # insert a diamond graph to exercise deltas and merges.
 | 
|
163  | 
if trailing_eol:  | 
|
164  | 
last_char = '\n'  | 
|
165  | 
else:  | 
|
166  | 
last_char = ''  | 
|
167  | 
f.add_lines('origin', [], ['origin' + last_char])  | 
|
168  | 
f.add_lines('base', ['origin'], ['base' + last_char])  | 
|
169  | 
f.add_lines('left', ['base'], ['base\n', 'left' + last_char])  | 
|
| 
3350.3.11
by Robert Collins
 Test inserting a stream that overlaps the current content of a knit does not error.  | 
170  | 
if not left_only:  | 
171  | 
f.add_lines('right', ['base'],  | 
|
172  | 
['base\n', 'right' + last_char])  | 
|
173  | 
f.add_lines('merged', ['left', 'right'],  | 
|
174  | 
['base\n', 'left\n', 'right\n', 'merged' + last_char])  | 
|
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
175  | 
return f, parents  | 
176  | 
||
177  | 
||
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
178  | 
def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,  | 
179  | 
nograph=False):  | 
|
180  | 
"""Get a diamond graph to exercise deltas and merges.  | 
|
181  | 
||
182  | 
    This creates a 5-node graph in files. If files supports 2-length keys two
 | 
|
183  | 
    graphs are made to exercise the support for multiple ids.
 | 
|
184  | 
    
 | 
|
185  | 
    :param trailing_eol: If True end the last line with \n.
 | 
|
186  | 
    :param key_length: The length of keys in files. Currently supports length 1
 | 
|
187  | 
        and 2 keys.
 | 
|
188  | 
    :param left_only: If True do not add the right and merged nodes.
 | 
|
189  | 
    :param nograph: If True, do not provide parents to the add_lines calls;
 | 
|
190  | 
        this is useful for tests that need inserted data but have graphless
 | 
|
191  | 
        stores.
 | 
|
192  | 
    :return: The results of the add_lines calls.
 | 
|
193  | 
    """
 | 
|
194  | 
if key_length == 1:  | 
|
195  | 
prefixes = [()]  | 
|
196  | 
else:  | 
|
197  | 
prefixes = [('FileA',), ('FileB',)]  | 
|
198  | 
    # insert a diamond graph to exercise deltas and merges.
 | 
|
199  | 
if trailing_eol:  | 
|
200  | 
last_char = '\n'  | 
|
201  | 
else:  | 
|
202  | 
last_char = ''  | 
|
203  | 
result = []  | 
|
204  | 
def get_parents(suffix_list):  | 
|
205  | 
if nograph:  | 
|
206  | 
return ()  | 
|
207  | 
else:  | 
|
208  | 
result = [prefix + suffix for suffix in suffix_list]  | 
|
209  | 
return result  | 
|
210  | 
    # we loop over each key because that spreads the inserts across prefixes,
 | 
|
211  | 
    # which is how commit operates.
 | 
|
212  | 
for prefix in prefixes:  | 
|
213  | 
result.append(files.add_lines(prefix + ('origin',), (),  | 
|
214  | 
['origin' + last_char]))  | 
|
215  | 
for prefix in prefixes:  | 
|
216  | 
result.append(files.add_lines(prefix + ('base',),  | 
|
217  | 
get_parents([('origin',)]), ['base' + last_char]))  | 
|
218  | 
for prefix in prefixes:  | 
|
219  | 
result.append(files.add_lines(prefix + ('left',),  | 
|
220  | 
get_parents([('base',)]),  | 
|
221  | 
['base\n', 'left' + last_char]))  | 
|
222  | 
if not left_only:  | 
|
223  | 
for prefix in prefixes:  | 
|
224  | 
result.append(files.add_lines(prefix + ('right',),  | 
|
225  | 
get_parents([('base',)]),  | 
|
226  | 
['base\n', 'right' + last_char]))  | 
|
227  | 
for prefix in prefixes:  | 
|
228  | 
result.append(files.add_lines(prefix + ('merged',),  | 
|
229  | 
get_parents([('left',), ('right',)]),  | 
|
230  | 
['base\n', 'left\n', 'right\n', 'merged' + last_char]))  | 
|
231  | 
return result  | 
|
232  | 
||
233  | 
||
| 
1563.2.1
by Robert Collins
 Merge in a variation of the versionedfile api from versioned-file.  | 
234  | 
class VersionedFileTestMixIn(object):  | 
235  | 
"""A mixin test class for testing VersionedFiles.  | 
|
236  | 
||
237  | 
    This is not an adaptor-style test at this point because
 | 
|
238  | 
    theres no dynamic substitution of versioned file implementations,
 | 
|
239  | 
    they are strictly controlled by their owning repositories.
 | 
|
240  | 
    """
 | 
|
241  | 
||
| 
3316.2.3
by Robert Collins
 Remove manual notification of transaction finishing on versioned files.  | 
242  | 
def get_transaction(self):  | 
243  | 
if not hasattr(self, '_transaction'):  | 
|
244  | 
self._transaction = None  | 
|
245  | 
return self._transaction  | 
|
246  | 
||
| 
1563.2.1
by Robert Collins
 Merge in a variation of the versionedfile api from versioned-file.  | 
247  | 
def test_add(self):  | 
248  | 
f = self.get_file()  | 
|
249  | 
f.add_lines('r0', [], ['a\n', 'b\n'])  | 
|
250  | 
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])  | 
|
| 
1563.2.9
by Robert Collins
 Update versionedfile api tests to ensure that data is available after every operation.  | 
251  | 
def verify_file(f):  | 
252  | 
versions = f.versions()  | 
|
253  | 
self.assertTrue('r0' in versions)  | 
|
254  | 
self.assertTrue('r1' in versions)  | 
|
255  | 
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])  | 
|
256  | 
self.assertEquals(f.get_text('r0'), 'a\nb\n')  | 
|
257  | 
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])  | 
|
| 
1563.2.18
by Robert Collins
 get knit repositories really using knits for text storage.  | 
258  | 
self.assertEqual(2, len(f))  | 
259  | 
self.assertEqual(2, f.num_versions())  | 
|
| 
1563.2.9
by Robert Collins
 Update versionedfile api tests to ensure that data is available after every operation.  | 
260  | 
|
261  | 
self.assertRaises(RevisionNotPresent,  | 
|
262  | 
f.add_lines, 'r2', ['foo'], [])  | 
|
263  | 
self.assertRaises(RevisionAlreadyPresent,  | 
|
264  | 
f.add_lines, 'r1', [], [])  | 
|
265  | 
verify_file(f)  | 
|
| 
1666.1.6
by Robert Collins
 Make knit the default format.  | 
266  | 
        # this checks that reopen with create=True does not break anything.
 | 
267  | 
f = self.reopen_file(create=True)  | 
|
| 
1563.2.9
by Robert Collins
 Update versionedfile api tests to ensure that data is available after every operation.  | 
268  | 
verify_file(f)  | 
| 
1563.2.1
by Robert Collins
 Merge in a variation of the versionedfile api from versioned-file.  | 
269  | 
|
| 
1596.2.32
by Robert Collins
 Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.  | 
270  | 
def test_adds_with_parent_texts(self):  | 
271  | 
f = self.get_file()  | 
|
272  | 
parent_texts = {}  | 
|
| 
2776.1.1
by Robert Collins
 * The ``add_lines`` methods on ``VersionedFile`` implementations has changed  | 
273  | 
_, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])  | 
| 
1596.2.32
by Robert Collins
 Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.  | 
274  | 
try:  | 
| 
2776.1.1
by Robert Collins
 * The ``add_lines`` methods on ``VersionedFile`` implementations has changed  | 
275  | 
_, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',  | 
276  | 
['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)  | 
|
| 
1596.2.32
by Robert Collins
 Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.  | 
277  | 
except NotImplementedError:  | 
278  | 
            # if the format doesn't support ghosts, just add normally.
 | 
|
| 
2776.1.1
by Robert Collins
 * The ``add_lines`` methods on ``VersionedFile`` implementations has changed  | 
279  | 
_, _, parent_texts['r1'] = f.add_lines('r1',  | 
280  | 
['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)  | 
|
| 
1596.2.32
by Robert Collins
 Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.  | 
281  | 
f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)  | 
282  | 
self.assertNotEqual(None, parent_texts['r0'])  | 
|
283  | 
self.assertNotEqual(None, parent_texts['r1'])  | 
|
284  | 
def verify_file(f):  | 
|
285  | 
versions = f.versions()  | 
|
286  | 
self.assertTrue('r0' in versions)  | 
|
287  | 
self.assertTrue('r1' in versions)  | 
|
288  | 
self.assertTrue('r2' in versions)  | 
|
289  | 
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])  | 
|
290  | 
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])  | 
|
291  | 
self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])  | 
|
292  | 
self.assertEqual(3, f.num_versions())  | 
|
293  | 
origins = f.annotate('r1')  | 
|
294  | 
self.assertEquals(origins[0][0], 'r0')  | 
|
295  | 
self.assertEquals(origins[1][0], 'r1')  | 
|
296  | 
origins = f.annotate('r2')  | 
|
297  | 
self.assertEquals(origins[0][0], 'r1')  | 
|
298  | 
self.assertEquals(origins[1][0], 'r2')  | 
|
299  | 
||
300  | 
verify_file(f)  | 
|
301  | 
f = self.reopen_file()  | 
|
302  | 
verify_file(f)  | 
|
303  | 
||
| 
2805.6.7
by Robert Collins
 Review feedback.  | 
304  | 
def test_add_unicode_content(self):  | 
305  | 
        # unicode content is not permitted in versioned files. 
 | 
|
306  | 
        # versioned files version sequences of bytes only.
 | 
|
307  | 
vf = self.get_file()  | 
|
308  | 
self.assertRaises(errors.BzrBadParameterUnicode,  | 
|
309  | 
vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])  | 
|
310  | 
self.assertRaises(  | 
|
311  | 
(errors.BzrBadParameterUnicode, NotImplementedError),  | 
|
312  | 
vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])  | 
|
313  | 
||
| 
2520.4.150
by Aaron Bentley
 Test that non-Weave uses left_matching_blocks for add_lines  | 
314  | 
def test_add_follows_left_matching_blocks(self):  | 
315  | 
"""If we change left_matching_blocks, delta changes  | 
|
316  | 
||
317  | 
        Note: There are multiple correct deltas in this case, because
 | 
|
318  | 
        we start with 1 "a" and we get 3.
 | 
|
319  | 
        """
 | 
|
320  | 
vf = self.get_file()  | 
|
321  | 
if isinstance(vf, WeaveFile):  | 
|
322  | 
raise TestSkipped("WeaveFile ignores left_matching_blocks")  | 
|
323  | 
vf.add_lines('1', [], ['a\n'])  | 
|
324  | 
vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],  | 
|
325  | 
left_matching_blocks=[(0, 0, 1), (1, 3, 0)])  | 
|
| 
2794.1.2
by Robert Collins
 Nuke versioned file add/get delta support, allowing easy simplification of unannotated Content, reducing memory copies and friction during commit on unannotated texts.  | 
326  | 
self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))  | 
| 
2520.4.150
by Aaron Bentley
 Test that non-Weave uses left_matching_blocks for add_lines  | 
327  | 
vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],  | 
328  | 
left_matching_blocks=[(0, 2, 1), (1, 3, 0)])  | 
|
| 
2794.1.2
by Robert Collins
 Nuke versioned file add/get delta support, allowing easy simplification of unannotated Content, reducing memory copies and friction during commit on unannotated texts.  | 
329  | 
self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))  | 
| 
2520.4.150
by Aaron Bentley
 Test that non-Weave uses left_matching_blocks for add_lines  | 
330  | 
|
| 
2805.6.7
by Robert Collins
 Review feedback.  | 
331  | 
def test_inline_newline_throws(self):  | 
332  | 
        # \r characters are not permitted in lines being added
 | 
|
333  | 
vf = self.get_file()  | 
|
334  | 
self.assertRaises(errors.BzrBadParameterContainsNewline,  | 
|
335  | 
vf.add_lines, 'a', [], ['a\n\n'])  | 
|
336  | 
self.assertRaises(  | 
|
337  | 
(errors.BzrBadParameterContainsNewline, NotImplementedError),  | 
|
338  | 
vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])  | 
|
339  | 
        # but inline CR's are allowed
 | 
|
340  | 
vf.add_lines('a', [], ['a\r\n'])  | 
|
341  | 
try:  | 
|
342  | 
vf.add_lines_with_ghosts('b', [], ['a\r\n'])  | 
|
343  | 
except NotImplementedError:  | 
|
344  | 
            pass
 | 
|
345  | 
||
| 
2229.2.1
by Aaron Bentley
 Reject reserved ids in versiondfile, tree, branch and repository  | 
346  | 
def test_add_reserved(self):  | 
347  | 
vf = self.get_file()  | 
|
348  | 
self.assertRaises(errors.ReservedId,  | 
|
349  | 
vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])  | 
|
350  | 
||
| 
2794.1.1
by Robert Collins
 Allow knits to be instructed not to add a text based on a sha, for commit.  | 
351  | 
def test_add_lines_nostoresha(self):  | 
352  | 
"""When nostore_sha is supplied using old content raises."""  | 
|
353  | 
vf = self.get_file()  | 
|
354  | 
empty_text = ('a', [])  | 
|
355  | 
sample_text_nl = ('b', ["foo\n", "bar\n"])  | 
|
356  | 
sample_text_no_nl = ('c', ["foo\n", "bar"])  | 
|
357  | 
shas = []  | 
|
358  | 
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):  | 
|
359  | 
sha, _, _ = vf.add_lines(version, [], lines)  | 
|
360  | 
shas.append(sha)  | 
|
361  | 
        # we now have a copy of all the lines in the vf.
 | 
|
362  | 
for sha, (version, lines) in zip(  | 
|
363  | 
shas, (empty_text, sample_text_nl, sample_text_no_nl)):  | 
|
364  | 
self.assertRaises(errors.ExistingContent,  | 
|
365  | 
vf.add_lines, version + "2", [], lines,  | 
|
366  | 
nostore_sha=sha)  | 
|
367  | 
            # and no new version should have been added.
 | 
|
368  | 
self.assertRaises(errors.RevisionNotPresent, vf.get_lines,  | 
|
369  | 
version + "2")  | 
|
370  | 
||
| 
2803.1.1
by Robert Collins
 Fix typo in ghosts version of test_add_lines_nostoresha.  | 
371  | 
def test_add_lines_with_ghosts_nostoresha(self):  | 
| 
2794.1.1
by Robert Collins
 Allow knits to be instructed not to add a text based on a sha, for commit.  | 
372  | 
"""When nostore_sha is supplied using old content raises."""  | 
373  | 
vf = self.get_file()  | 
|
374  | 
empty_text = ('a', [])  | 
|
375  | 
sample_text_nl = ('b', ["foo\n", "bar\n"])  | 
|
376  | 
sample_text_no_nl = ('c', ["foo\n", "bar"])  | 
|
377  | 
shas = []  | 
|
378  | 
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):  | 
|
379  | 
sha, _, _ = vf.add_lines(version, [], lines)  | 
|
380  | 
shas.append(sha)  | 
|
381  | 
        # we now have a copy of all the lines in the vf.
 | 
|
382  | 
        # is the test applicable to this vf implementation?
 | 
|
383  | 
try:  | 
|
384  | 
vf.add_lines_with_ghosts('d', [], [])  | 
|
385  | 
except NotImplementedError:  | 
|
386  | 
raise TestSkipped("add_lines_with_ghosts is optional")  | 
|
387  | 
for sha, (version, lines) in zip(  | 
|
388  | 
shas, (empty_text, sample_text_nl, sample_text_no_nl)):  | 
|
389  | 
self.assertRaises(errors.ExistingContent,  | 
|
390  | 
vf.add_lines_with_ghosts, version + "2", [], lines,  | 
|
391  | 
nostore_sha=sha)  | 
|
392  | 
            # and no new version should have been added.
 | 
|
393  | 
self.assertRaises(errors.RevisionNotPresent, vf.get_lines,  | 
|
394  | 
version + "2")  | 
|
395  | 
||
| 
2776.1.1
by Robert Collins
 * The ``add_lines`` methods on ``VersionedFile`` implementations has changed  | 
396  | 
def test_add_lines_return_value(self):  | 
397  | 
        # add_lines should return the sha1 and the text size.
 | 
|
398  | 
vf = self.get_file()  | 
|
399  | 
empty_text = ('a', [])  | 
|
400  | 
sample_text_nl = ('b', ["foo\n", "bar\n"])  | 
|
401  | 
sample_text_no_nl = ('c', ["foo\n", "bar"])  | 
|
402  | 
        # check results for the three cases:
 | 
|
403  | 
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):  | 
|
404  | 
            # the first two elements are the same for all versioned files:
 | 
|
405  | 
            # - the digest and the size of the text. For some versioned files
 | 
|
406  | 
            #   additional data is returned in additional tuple elements.
 | 
|
407  | 
result = vf.add_lines(version, [], lines)  | 
|
408  | 
self.assertEqual(3, len(result))  | 
|
409  | 
self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),  | 
|
410  | 
result[0:2])  | 
|
411  | 
        # parents should not affect the result:
 | 
|
412  | 
lines = sample_text_nl[1]  | 
|
413  | 
self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),  | 
|
414  | 
vf.add_lines('d', ['b', 'c'], lines)[0:2])  | 
|
415  | 
||
| 
2229.2.1
by Aaron Bentley
 Reject reserved ids in versiondfile, tree, branch and repository  | 
416  | 
def test_get_reserved(self):  | 
417  | 
vf = self.get_file()  | 
|
418  | 
self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])  | 
|
419  | 
self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')  | 
|
420  | 
self.assertRaises(errors.ReservedId, vf.get_text, 'b:')  | 
|
421  | 
||
| 
3468.2.4
by Martin Pool
 Test and fix #234748 problems in trailing newline diffs  | 
422  | 
def test_add_unchanged_last_line_noeol_snapshot(self):
    """Add a text with an unchanged last line with no eol should work."""
    # Test adding this in a number of chain lengths; because the interface
    # for VersionedFile does not allow forcing a specific chain length, we
    # just use a small base to get the first snapshot, then a much longer
    # first line for the next add (which will make the third add snapshot)
    # and so on. 20 has been chosen as an arbitrary figure - knits use 200
    # as a capped delta length, but ideally we would have some way of
    # tuning the test to the store (e.g. keep going until a snapshot
    # happens).
    for length in range(20):
        version_lines = {}
        vf = self.get_file('case-%d' % length)
        prefix = 'step-%d'
        parents = []
        for step in range(length):
            version = prefix % step
            lines = (['prelude \n'] * step) + ['line']
            vf.add_lines(version, parents, lines)
            version_lines[version] = lines
            parents = [version]
        vf.add_lines('no-eol', parents, ['line'])
        vf.get_texts(version_lines.keys())
        self.assertEqualDiff('line', vf.get_text('no-eol'))
|
446  | 
||
447  | 
def test_get_texts_eol_variation(self):
    """Alternating eol/no-eol texts must round-trip through get_texts."""
    # similar to the failure in <http://bugs.launchpad.net/234748>
    vf = self.get_file()
    sample_text_nl = ["line\n"]
    sample_text_no_nl = ["line"]
    versions = []
    version_lines = {}
    parents = []
    for i in range(4):
        version = 'v%d' % i
        if i % 2:
            lines = sample_text_nl
        else:
            lines = sample_text_no_nl
        # left_matching blocks is an internal api; it operates on the
        # *internal* representation for a knit, which is with *all* lines
        # being normalised to end with \n - even the final line in a no_nl
        # file. Using it here ensures that a broken internal implementation
        # (which is what this test tests) will generate a correct line
        # delta (which is to say, an empty delta).
        vf.add_lines(version, parents, lines,
            left_matching_blocks=[(0, 0, 1)])
        parents = [version]
        versions.append(version)
        version_lines[version] = lines
    vf.check()
    vf.get_texts(versions)
    vf.get_texts(reversed(versions))
|
475  | 
||
| 
3460.2.1
by Robert Collins
 * Inserting a bundle which changes the contents of a file with no trailing  | 
476  | 
def test_add_lines_with_matching_blocks_noeol_last_line(self):
    """Add a text with an unchanged last line with no eol should work."""
    from bzrlib import multiparent
    # Hand verified sha1 of the text we're adding.
    sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
    # Create a mpdiff which adds a new line before the trailing line, and
    # reuse the last line unaltered (which can cause annotation reuse).
    # Test adding this in two situations:
    # On top of a new insertion
    vf = self.get_file('fulltext')
    vf.add_lines('noeol', [], ['line'])
    vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
        left_matching_blocks=[(0, 1, 1)])
    self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
    # On top of a delta
    vf = self.get_file('delta')
    vf.add_lines('base', [], ['line'])
    vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
    vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
        left_matching_blocks=[(1, 1, 1)])
    self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
|
497  | 
||
| 
2520.4.85
by Aaron Bentley
 Get all test passing (which just proves there aren't enough tests!)  | 
498  | 
def test_make_mpdiffs(self):
    """mpdiffs extracted from one file must reproduce texts when installed
    into another."""
    from bzrlib import multiparent
    vf = self.get_file('foo')
    sha1s = self._setup_for_deltas(vf)
    new_vf = self.get_file('bar')
    for version in multiparent.topo_iter(vf):
        mpdiff = vf.make_mpdiffs([version])[0]
        new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                             vf.get_sha1s([version])[version], mpdiff)])
        self.assertEqualDiff(vf.get_text(version),
                             new_vf.get_text(version))
|
509  | 
||
| 
3453.3.2
by John Arbash Meinel
 Add a test case for the first loop, unable to find a way to trigger the second loop  | 
510  | 
def test_make_mpdiffs_with_ghosts(self):
    """make_mpdiffs of an absent (ghost) version raises RevisionNotPresent."""
    vf = self.get_file('foo')
    try:
        vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
    except NotImplementedError:
        # old Weave formats do not allow ghosts
        return
    self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])
518  | 
||
| 
1596.2.38
by Robert Collins
 rollback from using deltas to using fulltexts - deltas need more work to be ready.  | 
519  | 
def _setup_for_deltas(self, f):
    """Populate *f* with texts exercising delta-chain and eol edge cases.

    Returns a dict mapping chain depth -> expected sha1 for the chain1
    texts.
    """
    self.assertFalse(f.has_version('base'))
    # add texts that should trip the knit maximum delta chain threshold
    # as well as doing parallel chains of data in knits.
    # this is done by two chains of 25 insertions
    f.add_lines('base', [], ['line\n'])
    f.add_lines('noeol', ['base'], ['line'])
    # detailed eol tests:
    # shared last line with parent no-eol
    f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
    # differing last line with parent, both no-eol
    f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
    # add eol following a noneol parent, change content
    f.add_lines('eol', ['noeol'], ['phone\n'])
    # add eol following a noneol parent, no change content
    f.add_lines('eolline', ['noeol'], ['line\n'])
    # noeol with no parents:
    f.add_lines('noeolbase', [], ['line'])
    # noeol preceding its leftmost parent in the output:
    # this is done by making it a merge of two parents with no common
    # ancestry: noeolbase and noeol with the
    # later-inserted parent the leftmost.
    f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
    # two identical eol texts
    f.add_lines('noeoldup', ['noeol'], ['line'])
    next_parent = 'base'
    text_name = 'chain1-'
    text = ['line\n']
    sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
             1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
             2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
             3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
             4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
             5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
             6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
             7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
             8: '779e9a0b28f9f832528d4b21e17e168c67697272',
             9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
             10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
             11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
             12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
             13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
             14: '2c4b1736566b8ca6051e668de68650686a3922f2',
             15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
             16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
             17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
             18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
             19: '1ebed371807ba5935958ad0884595126e8c4e823',
             20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
             21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
             22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
             23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
             24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
             25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
             }
    for depth in range(26):
        new_version = text_name + '%s' % depth
        text = text + ['line\n']
        f.add_lines(new_version, [next_parent], text)
        next_parent = new_version
    next_parent = 'base'
    text_name = 'chain2-'
    text = ['line\n']
    for depth in range(26):
        new_version = text_name + '%s' % depth
        text = text + ['line\n']
        f.add_lines(new_version, [next_parent], text)
        next_parent = new_version
    return sha1s
| 
1596.2.37
by Robert Collins
 Switch to delta based content copying in the generic versioned file copier.  | 
588  | 
|
| 
1563.2.1
by Robert Collins
 Merge in a variation of the versionedfile api from versioned-file.  | 
589  | 
def test_ancestry(self):
    """get_ancestry returns a topologically valid ancestor list."""
    f = self.get_file()
    self.assertEqual([], f.get_ancestry([]))
    f.add_lines('r0', [], ['a\n', 'b\n'])
    f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
    f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
    f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
    f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
    self.assertEqual([], f.get_ancestry([]))
    versions = f.get_ancestry(['rM'])
    # there are some possibilities:
    # r0 r1 r2 rM r3
    # r0 r1 r2 r3 rM
    # etc
    # so we check indexes
    r0 = versions.index('r0')
    r1 = versions.index('r1')
    r2 = versions.index('r2')
    self.assertFalse('r3' in versions)
    rM = versions.index('rM')
    self.assertTrue(r0 < r1)
    self.assertTrue(r0 < r2)
    self.assertTrue(r1 < rM)
    self.assertTrue(r2 < rM)

    # requesting an absent version must raise
    self.assertRaises(RevisionNotPresent,
        f.get_ancestry, ['rM', 'rX'])

    # unsorted ancestry covers the same versions as the sorted form
    self.assertEqual(set(f.get_ancestry('rM')),
        set(f.get_ancestry('rM', topo_sorted=False)))
|
619  | 
||
| 
1594.2.21
by Robert Collins
 Teach versioned files to prevent mutation after finishing.  | 
620  | 
def test_mutate_after_finish(self):
    """Writes after the transaction scope changes must be refused."""
    self._transaction = 'before'
    f = self.get_file()
    self._transaction = 'after'
    self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
    self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
|
| 
1563.2.7
by Robert Collins
 add versioned file clear_cache entry.  | 
626  | 
|
| 
1563.2.15
by Robert Collins
 remove the weavestore assumptions about the number and nature of files it manages.  | 
627  | 
def test_copy_to(self):
    """copy_to writes every suffix file to the target transport."""
    f = self.get_file()
    f.add_lines('0', [], ['a\n'])
    t = MemoryTransport()
    f.copy_to('foo', t)
    for suffix in self.get_factory().get_suffixes():
        self.assertTrue(t.has('foo' + suffix))
634  | 
||
635  | 
def test_get_suffixes(self):
    """get_suffixes should return a list."""
    f = self.get_file()
    # and should be a list
    self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))
| 
1563.2.15
by Robert Collins
 remove the weavestore assumptions about the number and nature of files it manages.  | 
639  | 
|
| 
3287.5.1
by Robert Collins
 Add VersionedFile.get_parent_map.  | 
640  | 
def test_get_parent_map(self):
    """get_parent_map maps present versions to parent tuples and silently
    omits absent ones."""
    f = self.get_file()
    f.add_lines('r0', [], ['a\n', 'b\n'])
    self.assertEqual(
        {'r0':()}, f.get_parent_map(['r0']))
    f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
    self.assertEqual(
        {'r1':('r0',)}, f.get_parent_map(['r1']))
    self.assertEqual(
        {'r0':(),
         'r1':('r0',)},
        f.get_parent_map(['r0', 'r1']))
    f.add_lines('r2', [], ['a\n', 'b\n'])
    f.add_lines('r3', [], ['a\n', 'b\n'])
    f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
    self.assertEqual(
        {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
    self.assertEqual({}, f.get_parent_map('y'))
    self.assertEqual(
        {'r0':(),
         'r1':('r0',)},
        f.get_parent_map(['r0', 'y', 'r1']))
|
662  | 
||
| 
1563.2.1
by Robert Collins
 Merge in a variation of the versionedfile api from versioned-file.  | 
663  | 
def test_annotate(self):
    """annotate attributes each line to its origin version; absent
    versions raise RevisionNotPresent."""
    f = self.get_file()
    f.add_lines('r0', [], ['a\n', 'b\n'])
    f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
    origins = f.annotate('r1')
    self.assertEquals(origins[0][0], 'r1')
    self.assertEquals(origins[1][0], 'r0')

    self.assertRaises(RevisionNotPresent,
        f.annotate, 'foo')
|
673  | 
||
| 
1563.2.6
by Robert Collins
 Start check tests for knits (pending), and remove dead code.  | 
674  | 
def test_detection(self):
    """Corrupted texts and checksums must be detected on access."""
    # Test weaves detect corruption.
    #
    # Weaves contain a checksum of their texts.
    # When a text is extracted, this checksum should be
    # verified.

    w = self.get_file_corrupted_text()

    self.assertEqual('hello\n', w.get_text('v1'))
    self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
    self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
    self.assertRaises(errors.WeaveInvalidChecksum, w.check)

    w = self.get_file_corrupted_checksum()

    self.assertEqual('hello\n', w.get_text('v1'))
    self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
    self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
    self.assertRaises(errors.WeaveInvalidChecksum, w.check)
|
694  | 
||
695  | 
def get_file_corrupted_text(self):
    """Return a versioned file with corrupt text but valid metadata."""
    raise NotImplementedError(self.get_file_corrupted_text)
|
698  | 
||
| 
1563.2.9
by Robert Collins
 Update versionedfile api tests to ensure that data is available after every operation.  | 
699  | 
def reopen_file(self, name='foo'):
    """Open the versioned file from disk again."""
    raise NotImplementedError(self.reopen_file)
|
702  | 
||
| 
1594.2.6
by Robert Collins
 Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved.  | 
703  | 
def test_iter_lines_added_or_present_in_versions(self):
    """iter_lines_added_or_present_in_versions yields at least the lines
    added by the requested versions, reporting progress as it walks."""
    # test that we get at least an equalset of the lines added by
    # versions in the weave
    # the ordering here is to make a tree so that dumb searches have
    # more changes to muck up.

    class InstrumentedProgress(progress.DummyProgress):
        # records every progress update so the test can assert on them

        def __init__(self):
            progress.DummyProgress.__init__(self)
            self.updates = []

        def update(self, msg=None, current=None, total=None):
            self.updates.append((msg, current, total))

    vf = self.get_file()
    # add a base to get included
    vf.add_lines('base', [], ['base\n'])
    # add a ancestor to be included on one side
    vf.add_lines('lancestor', [], ['lancestor\n'])
    # add a ancestor to be included on the other side
    vf.add_lines('rancestor', ['base'], ['rancestor\n'])
    # add a child of rancestor with no eofile-nl
    vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
    # add a child of lancestor and base to join the two roots
    vf.add_lines('otherchild',
                 ['lancestor', 'base'],
                 ['base\n', 'lancestor\n', 'otherchild\n'])

    def iter_with_versions(versions, expected):
        # now we need to see what lines are returned, and how often.
        lines = {}
        progress = InstrumentedProgress()
        # iterate over the lines
        for line in vf.iter_lines_added_or_present_in_versions(versions,
            pb=progress):
            lines.setdefault(line, 0)
            lines[line] += 1
        if [] != progress.updates:
            self.assertEqual(expected, progress.updates)
        return lines

    lines = iter_with_versions(['child', 'otherchild'],
                               [('Walking content.', 0, 2),
                                ('Walking content.', 1, 2),
                                ('Walking content.', 2, 2)])
    # we must see child and otherchild
    self.assertTrue(lines[('child\n', 'child')] > 0)
    self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
    # we don't care if we got more than that.

    # test all lines
    lines = iter_with_versions(None, [('Walking content.', 0, 5),
                                      ('Walking content.', 1, 5),
                                      ('Walking content.', 2, 5),
                                      ('Walking content.', 3, 5),
                                      ('Walking content.', 4, 5),
                                      ('Walking content.', 5, 5)])
    # all lines must be seen at least once
    self.assertTrue(lines[('base\n', 'base')] > 0)
    self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
    self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
    self.assertTrue(lines[('child\n', 'child')] > 0)
    self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
|
| 
1594.2.7
by Robert Collins
 Add versionedfile.fix_parents api for correcting data post hoc.  | 
766  | 
|
| 
1594.2.8
by Robert Collins
 add ghost aware apis to knits.  | 
767  | 
def test_add_lines_with_ghosts(self):
    """Ghost parents are hidden from the plain graph APIs but visible via
    the _with_ghosts variants, and resolved once the ghost is added."""
    # some versioned file formats allow lines to be added with parent
    # information that is > than that in the format. Formats that do
    # not support this need to raise NotImplementedError on the
    # add_lines_with_ghosts api.
    vf = self.get_file()
    # add a revision with ghost parents
    # The preferred form is utf8, but we should translate when needed
    parent_id_unicode = u'b\xbfse'
    parent_id_utf8 = parent_id_unicode.encode('utf8')
    try:
        vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
    except NotImplementedError:
        # check the other ghost apis are also not implemented
        self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
        self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
        return
    vf = self.reopen_file()
    # test key graph related apis: get_ancestry, _graph, get_parents,
    # has_version
    # - these are ghost unaware and must not reflect ghosts
    self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
    self.assertFalse(vf.has_version(parent_id_utf8))
    # we have _with_ghost apis to give us ghost information.
    self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
    self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
    # if we add something that is a ghost of another, it should correct the
    # results of the prior apis
    vf.add_lines(parent_id_utf8, [], [])
    self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
    self.assertEqual({'notbxbfse':(parent_id_utf8,)},
        vf.get_parent_map(['notbxbfse']))
    self.assertTrue(vf.has_version(parent_id_utf8))
    # we have _with_ghost apis to give us ghost information.
    self.assertEqual([parent_id_utf8, 'notbxbfse'],
        vf.get_ancestry_with_ghosts(['notbxbfse']))
    self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
| 
1594.2.8
by Robert Collins
 add ghost aware apis to knits.  | 
804  | 
|
| 
1594.2.9
by Robert Collins
 Teach Knit repositories how to handle ghosts without corrupting at all.  | 
805  | 
def test_add_lines_with_ghosts_after_normal_revs(self):
    """Annotation still attributes lines correctly when a later version
    references a ghost parent."""
    # some versioned file formats allow lines to be added with parent
    # information that is > than that in the format. Formats that do
    # not support this need to raise NotImplementedError on the
    # add_lines_with_ghosts api.
    vf = self.get_file()
    # probe for ghost support
    try:
        vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
    except NotImplementedError:
        return
    vf.add_lines_with_ghosts('references_ghost',
                             ['base', 'a_ghost'],
                             ['line\n', 'line_b\n', 'line_c\n'])
    origins = vf.annotate('references_ghost')
    self.assertEquals(('base', 'line\n'), origins[0])
    self.assertEquals(('base', 'line_b\n'), origins[1])
    self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
|
| 
1594.2.23
by Robert Collins
 Test versioned file storage handling of clean/dirty status for accessed versioned files.  | 
823  | 
|
824  | 
def test_readonly_mode(self):  | 
|
825  | 
transport = get_transport(self.get_url('.'))  | 
|
826  | 
factory = self.get_factory()  | 
|
827  | 
vf = factory('id', transport, 0777, create=True, access_mode='w')  | 
|
828  | 
vf = factory('id', transport, access_mode='r')  | 
|
829  | 
self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])  | 
|
830  | 
self.assertRaises(errors.ReadOnlyError,  | 
|
831  | 
vf.add_lines_with_ghosts,  | 
|
832  | 
'base',  | 
|
833  | 
                          [],
 | 
|
834  | 
                          [])
 | 
|
| 
1666.1.6
by Robert Collins
 Make knit the default format.  | 
835  | 
|
| 
3316.2.9
by Robert Collins
 * ``VersionedFile.get_sha1`` is deprecated, please use  | 
836  | 
def test_get_sha1s(self):
    """get_sha1s maps version ids to the sha1 of their text content."""
    # check the sha1 data is available
    vf = self.get_file()
    # a simple file
    vf.add_lines('a', [], ['a\n'])
    # the same file, different metadata
    vf.add_lines('b', ['a'], ['a\n'])
    # a file differing only in last newline.
    vf.add_lines('c', [], ['a'])
    self.assertEqual({
        'a': '3f786850e387550fdab836ed7e6dc881de23001b',
        'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
        'b': '3f786850e387550fdab836ed7e6dc881de23001b',
        },
        vf.get_sha1s(['a', 'c', 'b']))
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
    """Run the shared versioned-file tests against WeaveFile."""

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        """Return a weave whose stored text for 'v2' has been tampered with."""
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text; first make sure the
        # internals of the weave are laid out the way we expect, so the
        # corruption below hits the intended entry.
        self.assertEqual([
            ('{', 0),
            'hello\n',
            ('}', None),
            ('{', 1),
            'there\n',
            ('}', None),
            ], w._weave)
        self.assertEqual([
            'f572d396fae9206628714fb2ce00f72e94f2258f',
            '90f265c6e75f1c8f9ab76dcf85528352c5f215ef',
            ], w._sha1s)
        w.check()

        # Corrupt the stored text in place.
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        """Return a weave whose recorded sha1 for 'v2' is wrong."""
        w = self.get_file_corrupted_text()
        # Put the text back the way it was...
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))
        # ...and invalidate the checksum instead (first digit changed).
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        # Without create=True, opening a missing weave must fail.
        self.assertRaises(errors.NoSuchFile, WeaveFile, 'foo',
            get_transport(self.get_url('.')),
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile
class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
    """Tests for versionedfile._PlanMergeVersionedFile."""

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        # Two real backing stores, plus the plan-merge vf layered on top.
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        # Version ids added directly must end in ':', and parents/lines
        # must be real sequences, not None.
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        for key, parents, lines in [
                (('root', 'a'), [], []),
                (('root', 'a:'), None, []),
                (('root', 'a:'), [], None),
                ]:
            self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                              key, parents, lines)

    def setup_abcde(self):
        # A--B live in vf1, C--D in vf2, and E: merges B and D in the
        # plan-merge vf itself.
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        get_map = self.plan_merge_vf.get_parent_map
        self.assertEqual({('root', 'B'): (('root', 'A'),)},
                         get_map([('root', 'B')]))
        self.assertEqual({('root', 'D'): (('root', 'C'),)},
                         get_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'): (('root', 'B'), ('root', 'D'))},
                         get_map([('root', 'E:')]))
        # Unknown keys are simply absent from the result.
        self.assertEqual({}, get_map([('root', 'F')]))
        self.assertEqual({
            ('root', 'B'): (('root', 'A'),),
            ('root', 'D'): (('root', 'C'),),
            ('root', 'E:'): (('root', 'B'), ('root', 'D')),
            },
            get_map([('root', 'B'), ('root', 'D'), ('root', 'E:'),
                     ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)
class TestReadonlyHttpMixin(object):
    """Mixin checking a versioned file can be read over readonly HTTP.

    Concrete subclasses supply get_file() and get_factory().
    """

    def get_transaction(self):
        # A constant dummy scope for the get_scope= parameter.
        return 1

    def test_readonly_http_works(self):
        # We should be able to read from http with a versioned file.
        vf = self.get_file()
        # First, an access to a file with no versions in it.
        readonly_vf = self.get_factory()(
            'foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # Now with some content.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()(
            'foo', get_transport(self.get_readonly_url('.')))
        # NOTE(review): this asserts on the writable vf; possibly the
        # freshly re-opened readonly_vf was intended -- confirm.
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-HTTP checks against WeaveFile."""

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile
class MergeCasesMixin(object):
    """Shared weave-merge scenarios.

    Concrete subclasses provide get_file(), log_contents() and may
    override overlappedInsertExpected.
    """

    def doMerge(self, base, a, b, mp):
        """Merge a and b (both derived from base) and check the result.

        :param base: list of lines (no trailing newlines) for the base text.
        :param a: lines for one side of the merge.
        :param b: lines for the other side.
        :param mp: the expected merged lines.
        """
        # Removed an unused 'from textwrap import dedent' that previously
        # sat here -- dedent was never referenced.
        from cStringIO import StringIO

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        # NOTE(review): the expected opening marker below has eight '<'
        # characters where seven would match the other tests; verify when
        # this test is re-enabled.
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        # Like doMerge, but the inputs are whole strings and the result is
        # compared as a single string via assertEqualDiff.
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        base = """\
            line 1
            line 2
            line 3
            """
        a = """\
            line 1
            line 2
            """
        b = """\
            line 1
            """
        result = """\
            line 1
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        base = """\
            start context
            int a() {}
            int b() {}
            int c() {}
            end context
            """
        a = """\
            start context
            int a() {}
            end context
            """
        b = """\
            start context
            int c() {}
            end context
            """
        result = """\
            start context
<<<<<<< 
            int a() {}
=======
            int c() {}
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            end context
            """
        b = """\
            start context
            base line 1
            end context
            """
        result = """\
            start context
            base line 1
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<< 
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
    """Run the shared merge cases against WeaveFile."""

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        sio = StringIO()
        write_weave(w, sio)
        self.log(sio.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1246  | 
|
1247  | 
||
1248  | 
class TestContentFactoryAdaption(TestCaseWithMemoryTransport):  | 
|
1249  | 
||
1250  | 
def test_select_adaptor(self):  | 
|
| 
3350.3.7
by Robert Collins
 Create a registry of versioned file record adapters.  | 
1251  | 
"""Test expected adapters exist."""  | 
1252  | 
        # One scenario for each lookup combination we expect to use.
 | 
|
1253  | 
        # Each is source_kind, requested_kind, adapter class
 | 
|
1254  | 
scenarios = [  | 
|
1255  | 
('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),  | 
|
1256  | 
('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),  | 
|
1257  | 
('knit-annotated-delta-gz', 'knit-delta-gz',  | 
|
1258  | 
_mod_knit.DeltaAnnotatedToUnannotated),  | 
|
1259  | 
('knit-annotated-delta-gz', 'fulltext',  | 
|
1260  | 
_mod_knit.DeltaAnnotatedToFullText),  | 
|
1261  | 
('knit-annotated-ft-gz', 'knit-ft-gz',  | 
|
1262  | 
_mod_knit.FTAnnotatedToUnannotated),  | 
|
1263  | 
('knit-annotated-ft-gz', 'fulltext',  | 
|
1264  | 
_mod_knit.FTAnnotatedToFullText),  | 
|
1265  | 
            ]
 | 
|
1266  | 
for source, requested, klass in scenarios:  | 
|
1267  | 
adapter_factory = versionedfile.adapter_registry.get(  | 
|
1268  | 
(source, requested))  | 
|
1269  | 
adapter = adapter_factory(None)  | 
|
1270  | 
self.assertIsInstance(adapter, klass)  | 
|
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1271  | 
|
| 
3350.3.5
by Robert Collins
 Create adapters from plain compressed knit content.  | 
1272  | 
def get_knit(self, annotated=True):  | 
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
1273  | 
mapper = ConstantMapper('knit')  | 
1274  | 
transport = self.get_transport()  | 
|
1275  | 
return make_file_factory(annotated, mapper)(transport)  | 
|
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1276  | 
|
1277  | 
def helpGetBytes(self, f, ft_adapter, delta_adapter):  | 
|
| 
3350.3.22
by Robert Collins
 Review feedback.  | 
1278  | 
"""Grab the interested adapted texts for tests."""  | 
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1279  | 
        # origin is a fulltext
 | 
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
1280  | 
entries = f.get_record_stream([('origin',)], 'unordered', False)  | 
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1281  | 
base = entries.next()  | 
1282  | 
ft_data = ft_adapter.get_bytes(base, base.get_bytes_as(base.storage_kind))  | 
|
1283  | 
        # merged is both a delta and multiple parents.
 | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
1284  | 
entries = f.get_record_stream([('merged',)], 'unordered', False)  | 
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1285  | 
merged = entries.next()  | 
1286  | 
delta_data = delta_adapter.get_bytes(merged,  | 
|
1287  | 
merged.get_bytes_as(merged.storage_kind))  | 
|
1288  | 
return ft_data, delta_data  | 
|
1289  | 
||
1290  | 
def test_deannotation_noeol(self):  | 
|
1291  | 
"""Test converting annotated knits to unannotated knits."""  | 
|
1292  | 
        # we need a full text, and a delta
 | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
1293  | 
f = self.get_knit()  | 
1294  | 
get_diamond_files(f, 1, trailing_eol=False)  | 
|
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1295  | 
ft_data, delta_data = self.helpGetBytes(f,  | 
| 
3350.3.7
by Robert Collins
 Create a registry of versioned file record adapters.  | 
1296  | 
_mod_knit.FTAnnotatedToUnannotated(None),  | 
1297  | 
_mod_knit.DeltaAnnotatedToUnannotated(None))  | 
|
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1298  | 
self.assertEqual(  | 
1299  | 
'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'  | 
|
1300  | 
'origin\n'  | 
|
1301  | 
'end origin\n',  | 
|
1302  | 
GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())  | 
|
1303  | 
self.assertEqual(  | 
|
1304  | 
'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'  | 
|
1305  | 
'1,2,3\nleft\nright\nmerged\nend merged\n',  | 
|
1306  | 
GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())  | 
|
1307  | 
||
1308  | 
def test_deannotation(self):  | 
|
1309  | 
"""Test converting annotated knits to unannotated knits."""  | 
|
1310  | 
        # we need a full text, and a delta
 | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
1311  | 
f = self.get_knit()  | 
1312  | 
get_diamond_files(f, 1)  | 
|
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1313  | 
ft_data, delta_data = self.helpGetBytes(f,  | 
| 
3350.3.7
by Robert Collins
 Create a registry of versioned file record adapters.  | 
1314  | 
_mod_knit.FTAnnotatedToUnannotated(None),  | 
1315  | 
_mod_knit.DeltaAnnotatedToUnannotated(None))  | 
|
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1316  | 
self.assertEqual(  | 
1317  | 
'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'  | 
|
1318  | 
'origin\n'  | 
|
1319  | 
'end origin\n',  | 
|
1320  | 
GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())  | 
|
1321  | 
self.assertEqual(  | 
|
1322  | 
'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'  | 
|
1323  | 
'2,2,2\nright\nmerged\nend merged\n',  | 
|
1324  | 
GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())  | 
|
1325  | 
||
1326  | 
def test_annotated_to_fulltext_no_eol(self):  | 
|
1327  | 
"""Test adapting annotated knits to full texts (for -> weaves)."""  | 
|
1328  | 
        # we need a full text, and a delta
 | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
1329  | 
f = self.get_knit()  | 
1330  | 
get_diamond_files(f, 1, trailing_eol=False)  | 
|
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1331  | 
        # Reconstructing a full text requires a backing versioned file, and it
 | 
1332  | 
        # must have the base lines requested from it.
 | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
1333  | 
logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)  | 
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1334  | 
ft_data, delta_data = self.helpGetBytes(f,  | 
| 
3350.3.7
by Robert Collins
 Create a registry of versioned file record adapters.  | 
1335  | 
_mod_knit.FTAnnotatedToFullText(None),  | 
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1336  | 
_mod_knit.DeltaAnnotatedToFullText(logged_vf))  | 
1337  | 
self.assertEqual('origin', ft_data)  | 
|
1338  | 
self.assertEqual('base\nleft\nright\nmerged', delta_data)  | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
1339  | 
self.assertEqual([('get_record_stream', [('left',)], 'unordered',  | 
1340  | 
True)], logged_vf.calls)  | 
|
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1341  | 
|
1342  | 
def test_annotated_to_fulltext(self):  | 
|
1343  | 
"""Test adapting annotated knits to full texts (for -> weaves)."""  | 
|
1344  | 
        # we need a full text, and a delta
 | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
1345  | 
f = self.get_knit()  | 
1346  | 
get_diamond_files(f, 1)  | 
|
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1347  | 
        # Reconstructing a full text requires a backing versioned file, and it
 | 
1348  | 
        # must have the base lines requested from it.
 | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
1349  | 
logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)  | 
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1350  | 
ft_data, delta_data = self.helpGetBytes(f,  | 
| 
3350.3.7
by Robert Collins
 Create a registry of versioned file record adapters.  | 
1351  | 
_mod_knit.FTAnnotatedToFullText(None),  | 
| 
3350.3.4
by Robert Collins
 Finish adapters for annotated knits to unannotated knits and full texts.  | 
1352  | 
_mod_knit.DeltaAnnotatedToFullText(logged_vf))  | 
1353  | 
self.assertEqual('origin\n', ft_data)  | 
|
1354  | 
self.assertEqual('base\nleft\nright\nmerged\n', delta_data)  | 
|
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
1355  | 
self.assertEqual([('get_record_stream', [('left',)], 'unordered',  | 
1356  | 
True)], logged_vf.calls)  | 
|
| 
3350.3.5
by Robert Collins
 Create adapters from plain compressed knit content.  | 
1357  | 
|
1358  | 
def test_unannotated_to_fulltext(self):
    """Adapting unannotated knit records to fulltexts.

    This is used for -> weaves, and for -> annotated knits.
    """
    # Exercise both record shapes: a full text and a delta.
    knit = self.get_knit(annotated=False)
    get_diamond_files(knit, 1)
    # Expanding a delta needs a backing versioned file, and the adapter
    # must request the base lines from it.
    backing = versionedfile.RecordingVersionedFilesDecorator(knit)
    ft_data, delta_data = self.helpGetBytes(knit,
        _mod_knit.FTPlainToFullText(None),
        _mod_knit.DeltaPlainToFullText(backing))
    self.assertEqual('origin\n', ft_data)
    self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
    # The delta adapter should have pulled its basis from the backing file.
    expected_calls = [
        ('get_record_stream', [('left',)], 'unordered', True)]
    self.assertEqual(expected_calls, backing.calls)
|
| 
3350.3.5
by Robert Collins
 Create adapters from plain compressed knit content.  | 
1376  | 
|
| 
3350.3.6
by Robert Collins
 Test EOL behaviour of plain knit record adapters.  | 
1377  | 
def test_unannotated_to_fulltext_no_eol(self):
    """Adapting unannotated no-eol knit records to fulltexts.

    This is used for -> weaves, and for -> annotated knits.
    """
    # Exercise both record shapes, with no trailing newline on the texts.
    knit = self.get_knit(annotated=False)
    get_diamond_files(knit, 1, trailing_eol=False)
    # Expanding a delta needs a backing versioned file, and the adapter
    # must request the base lines from it.
    backing = versionedfile.RecordingVersionedFilesDecorator(knit)
    ft_data, delta_data = self.helpGetBytes(knit,
        _mod_knit.FTPlainToFullText(None),
        _mod_knit.DeltaPlainToFullText(backing))
    # No trailing newline on the reconstructed texts either.
    self.assertEqual('origin', ft_data)
    self.assertEqual('base\nleft\nright\nmerged', delta_data)
    # The delta adapter should have pulled its basis from the backing file.
    expected_calls = [
        ('get_record_stream', [('left',)], 'unordered', True)]
    self.assertEqual(expected_calls, backing.calls)
|
| 
3350.3.6
by Robert Collins
 Test EOL behaviour of plain knit record adapters.  | 
1395  | 
|
| 
3350.6.1
by Robert Collins
 * New ``versionedfile.KeyMapper`` interface to abstract out the access to  | 
1396  | 
|
1397  | 
class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        # A ConstantMapper sends every key to the same storage name.
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        # format5: plain
        mapper = versionedfile.PrefixMapper()
        # map() keeps only the key prefix; unmap() restores it as a tuple.
        for name, key in [("file-id", ("file-id", "revision-id")),
                          ("new-id", ("new-id", "revision-id"))]:
            self.assertEqual(name, mapper.map(key))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        # format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        # The prefix gains a two-character hash directory.
        for name, key in [("9b/file-id", ("file-id", "revision-id")),
                          ("45/new-id", ("new-id", "revision-id"))]:
            self.assertEqual(name, mapper.map(key))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        # knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        # Unsafe characters (spaces, upper case) are %-escaped in the name.
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
|
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
1431  | 
|
1432  | 
||
1433  | 
class TestVersionedFiles(TestCaseWithMemoryTransport):  | 
|
1434  | 
"""Tests for the multiple-file variant of VersionedFile."""  | 
|
1435  | 
||
| 
3350.6.4
by Robert Collins
 First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.  | 
1436  | 
def get_versionedfiles(self, relpath='files'):
    """Build the VersionedFiles under test on a fresh transport."""
    t = self.get_transport(relpath)
    if relpath != '.':
        t.mkdir('.')
    store = self.factory(t)
    if self.cleanup is not None:
        # Bind the store into the closure now; the hook runs at teardown.
        self.addCleanup(lambda: self.cleanup(store))
    return store
|
1444  | 
||
1445  | 
def test_annotate(self):
    """annotate() attributes each line to the version that introduced it."""
    files = self.get_versionedfiles()
    self.get_diamond_files(files)
    if self.key_length == 1:
        prefix = ()
    else:
        prefix = ('FileA',)
    # A version introduced as a full text.
    origins = files.annotate(prefix + ('origin',))
    self.assertEqual([(prefix + ('origin',), 'origin\n')], origins)
    # A version stored as a delta.
    origins = files.annotate(prefix + ('base',))
    self.assertEqual([(prefix + ('base',), 'base\n')], origins)
    # A merge pulling lines in from several versions.
    origins = files.annotate(prefix + ('merged',))
    if self.graph:
        self.assertEqual([
            (prefix + ('base',), 'base\n'),
            (prefix + ('left',), 'left\n'),
            (prefix + ('right',), 'right\n'),
            (prefix + ('merged',), 'merged\n'),
            ], origins)
    else:
        # Without a graph everything is new.
        self.assertEqual([
            (prefix + ('merged',), 'base\n'),
            (prefix + ('merged',), 'left\n'),
            (prefix + ('merged',), 'right\n'),
            (prefix + ('merged',), 'merged\n'),
            ], origins)
    # Asking about an unknown key is an error.
    self.assertRaises(RevisionNotPresent,
        files.annotate, prefix + ('missing-key',))
|
1483  | 
||
| 
3350.6.2
by Robert Collins
 Prepare parameterised test environment.  | 
1484  | 
def test_construct(self):
    """Each parameterised test can be constructed on a transport."""
    # Construction alone is the assertion: no exception may escape.
    self.get_versionedfiles()
1487  | 
||
1488  | 
def get_diamond_files(self, files, trailing_eol=True, left_only=False):
    """Populate files with the standard diamond ancestry test data."""
    # Delegate to the module-level helper, translating our graph flag
    # into its nograph parameter.
    return get_diamond_files(files, self.key_length,
        trailing_eol=trailing_eol, nograph=not self.graph,
        left_only=left_only)
|
1492  | 
||
1493  | 
def test_add_lines_return(self):
    """add_lines returns (sha1, length, ...) for each inserted text."""
    files = self.get_versionedfiles()
    # save code by using the stock data insertion helper.
    adds = self.get_diamond_files(files)
    results = []
    # We can only validate the first 2 elements returned from add_lines.
    for add in adds:
        self.assertEqual(3, len(add))
        results.append(add[:2])
    expected = [
        ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
        ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
        ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
        ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
        ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
        ]
    if self.key_length == 1:
        self.assertEqual(expected, results)
    elif self.key_length == 2:
        # Two prefixes receive identical content, so each (sha, length)
        # pair appears twice, in sequence.
        doubled = []
        for pair in expected:
            doubled.append(pair)
            doubled.append(pair)
        self.assertEqual(doubled, results)
|
1523  | 
||
1524  | 
def test_empty_lines(self):
    """Empty files can be stored."""
    store = self.get_versionedfiles()

    def stored_text(key):
        # Round-trip a single key back out of the store as a fulltext.
        stream = store.get_record_stream([key], 'unordered', True)
        return stream.next().get_bytes_as('fulltext')

    key_a = self.get_simple_key('a')
    store.add_lines(key_a, [], [])
    self.assertEqual('', stored_text(key_a))
    key_b = self.get_simple_key('b')
    store.add_lines(key_b, self.get_parents([key_a]), [])
    self.assertEqual('', stored_text(key_b))
|
1537  | 
||
1538  | 
def test_newline_only(self):
    """A text consisting of a single newline round-trips intact."""
    store = self.get_versionedfiles()

    def stored_text(key):
        # Round-trip a single key back out of the store as a fulltext.
        stream = store.get_record_stream([key], 'unordered', True)
        return stream.next().get_bytes_as('fulltext')

    key_a = self.get_simple_key('a')
    store.add_lines(key_a, [], ['\n'])
    self.assertEqual('\n', stored_text(key_a))
    key_b = self.get_simple_key('b')
    store.add_lines(key_b, self.get_parents([key_a]), ['\n'])
    self.assertEqual('\n', stored_text(key_b))
|
1550  | 
||
1551  | 
def test_get_record_stream_empty(self):
    """An empty stream can be requested without error."""
    store = self.get_versionedfiles()
    # Requesting no keys yields an empty, error-free stream.
    self.assertEqual([],
        list(store.get_record_stream([], 'unordered', False)))
|
1556  | 
||
1557  | 
def assertValidStorageKind(self, storage_kind):
    """Assert that storage_kind is a valid storage_kind."""
    # The complete set of storage kinds record streams may emit.
    known_kinds = [
        'mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
        'knit-ft', 'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
        'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz']
    self.assertSubset([storage_kind], known_kinds)
|
1563  | 
||
1564  | 
def capture_stream(self, f, entries, on_seen, parents):
    """Capture a stream for testing.

    Each record is reported via on_seen and validated against f's
    sha1s and the supplied parents mapping.
    """
    for record in entries:
        on_seen(record.key)
        self.assertValidStorageKind(record.storage_kind)
        self.assertEqual(f.get_sha1s([record.key])[record.key],
            record.sha1)
        self.assertEqual(parents[record.key], record.parents)
        # The record must at least serialise in its native kind.
        self.assertIsInstance(record.get_bytes_as(record.storage_kind),
            str)
|
1574  | 
||
1575  | 
def test_get_record_stream_interface(self):
    """each item in a stream has to provide a regular interface."""
    store = self.get_versionedfiles()
    self.get_diamond_files(store)
    keys, _ = self.get_keys_and_sort_order()
    parent_map = store.get_parent_map(keys)
    stream = store.get_record_stream(keys, 'unordered', False)
    observed = set()
    # capture_stream validates each record as it collects the keys.
    self.capture_stream(store, stream, observed.add, parent_map)
    self.assertEqual(set(keys), observed)
|
1585  | 
||
1586  | 
def get_simple_key(self, suffix):
    """Return a key for the object under test."""
    # Length-1 keys are bare; longer keys get a fixed file prefix.
    if self.key_length == 1:
        return (suffix,)
    return ('FileA', suffix)
|
1592  | 
||
1593  | 
def get_keys_and_sort_order(self):
    """Get diamond test keys list, and their sort ordering."""
    # Topological depth of each diamond node: base < left/right < merged.
    depths = {'merged': 2, 'left': 1, 'right': 1, 'base': 0}
    names = ('merged', 'left', 'right', 'base')
    if self.key_length == 1:
        keys = [(name,) for name in names]
        sort_order = dict(((name,), depths[name]) for name in names)
    else:
        keys = []
        sort_order = {}
        for file_id in ('FileA', 'FileB'):
            for name in names:
                keys.append((file_id, name))
                sort_order[(file_id, name)] = depths[name]
    return keys, sort_order
|
1612  | 
||
1613  | 
def test_get_record_stream_interface_ordered(self):
    """each item in a stream has to provide a regular interface."""
    store = self.get_versionedfiles()
    self.get_diamond_files(store)
    keys, sort_order = self.get_keys_and_sort_order()
    parent_map = store.get_parent_map(keys)
    stream = store.get_record_stream(keys, 'topological', False)
    observed = []
    # capture_stream validates each record; the list preserves arrival
    # order so topological sorting can be checked afterwards.
    self.capture_stream(store, stream, observed.append, parent_map)
    self.assertStreamOrder(sort_order, observed, keys)
|
1623  | 
||
1624  | 
def test_get_record_stream_interface_ordered_with_delta_closure(self):
    """each item must be accessible as a fulltext."""
    store = self.get_versionedfiles()
    self.get_diamond_files(store)
    keys, sort_order = self.get_keys_and_sort_order()
    parent_map = store.get_parent_map(keys)
    stream = store.get_record_stream(keys, 'topological', True)
    observed = []
    for record in stream:
        observed.append(record.key)
        self.assertValidStorageKind(record.storage_kind)
        # With include_delta_closure the sha1 may be omitted (None) or
        # must match the stored value.
        self.assertSubset([record.sha1],
            [None, store.get_sha1s([record.key])[record.key]])
        self.assertEqual(parent_map[record.key], record.parents)
        # Every record must expand to a fulltext as well as serialise
        # in its native storage kind.
        self.assertIsInstance(record.get_bytes_as('fulltext'), str)
        self.assertIsInstance(record.get_bytes_as(record.storage_kind),
            str)
    self.assertStreamOrder(sort_order, observed, keys)
|
1643  | 
||
1644  | 
def assertStreamOrder(self, sort_order, seen, keys):
    """Assert that seen is a valid topological ordering of keys."""
    self.assertEqual(len(set(seen)), len(keys))
    # Track the lowest depth still acceptable, per key prefix.
    if self.key_length == 1:
        lows = {(): 0}
    else:
        lows = {('FileA',): 0, ('FileB',): 0}
    if not self.graph:
        # Without a graph there is no ordering to enforce.
        self.assertEqual(set(keys), set(seen))
    else:
        for key in seen:
            depth = sort_order[key]
            prefix = key[:-1]
            # Depths must be non-decreasing within each prefix.
            self.assertTrue(depth >= lows[prefix],
                "Out of order in sorted stream: %r, %r" % (key, seen))
            lows[prefix] = depth
|
1658  | 
||
1659  | 
def test_get_record_stream_unknown_storage_kind_raises(self):
    """Asking for a storage kind that the stream cannot supply raises."""
    store = self.get_versionedfiles()
    self.get_diamond_files(store)
    if self.key_length == 1:
        keys = [('merged',), ('left',), ('right',), ('base',)]
    else:
        keys = []
        for file_id in ('FileA', 'FileB'):
            for name in ('merged', 'left', 'right', 'base'):
                keys.append((file_id, name))
    parent_map = store.get_parent_map(keys)
    stream = store.get_record_stream(keys, 'unordered', False)
    # We track the contents because we should be able to try, fail a
    # particular kind and then ask for one that works and continue.
    observed = set()
    for record in stream:
        observed.add(record.key)
        self.assertValidStorageKind(record.storage_kind)
        self.assertEqual(store.get_sha1s([record.key])[record.key],
            record.sha1)
        self.assertEqual(parent_map[record.key], record.parents)
        # currently no stream emits mpdiff
        self.assertRaises(errors.UnavailableRepresentation,
            record.get_bytes_as, 'mpdiff')
        # After the failed request the native kind must still work.
        self.assertIsInstance(record.get_bytes_as(record.storage_kind),
            str)
    self.assertEqual(set(keys), observed)
|
1689  | 
||
1690  | 
def test_get_record_stream_missing_records_are_absent(self):
    """Missing keys yield 'absent' records rather than errors."""
    store = self.get_versionedfiles()
    self.get_diamond_files(store)
    if self.key_length == 1:
        keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
    else:
        keys = [
            ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
            ('FileA', 'absent'), ('FileA', 'base'),
            ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
            ('FileB', 'absent'), ('FileB', 'base'),
            ('absent', 'absent'),
            ]
    parent_map = store.get_parent_map(keys)
    # Both orderings must report the absent keys.
    for ordering in ('unordered', 'topological'):
        stream = store.get_record_stream(keys, ordering, False)
        self.assertAbsentRecord(store, keys, parent_map, stream)
|
1708  | 
||
1709  | 
def assertAbsentRecord(self, files, keys, parents, entries):
    """Helper for test_get_record_stream_missing_records_are_absent."""
    observed = set()
    for record in entries:
        observed.add(record.key)
        if record.key[-1] == 'absent':
            # Missing keys come back as stub records with no content.
            self.assertEqual('absent', record.storage_kind)
            self.assertEqual(None, record.sha1)
            self.assertEqual(None, record.parents)
        else:
            # Present keys must be fully formed records.
            self.assertValidStorageKind(record.storage_kind)
            self.assertEqual(files.get_sha1s([record.key])[record.key],
                record.sha1)
            self.assertEqual(parents[record.key], record.parents)
            self.assertIsInstance(record.get_bytes_as(record.storage_kind),
                str)
    self.assertEqual(set(keys), observed)
|
1726  | 
||
1727  | 
def test_filter_absent_records(self):
    """Requested missing records can be filter trivially."""
    store = self.get_versionedfiles()
    self.get_diamond_files(store)
    keys, _ = self.get_keys_and_sort_order()
    parent_map = store.get_parent_map(keys)
    # Add an absent record in the middle of the present keys. (We don't ask
    # for just absent keys to ensure that content before and after the
    # absent keys is still delivered).
    present_keys = list(keys)
    if self.key_length == 1:
        absent_key = ('extra',)
    else:
        absent_key = ('extra', 'extra')
    keys.insert(2, absent_key)
    stream = store.get_record_stream(keys, 'unordered', False)
    observed = set()
    self.capture_stream(store, versionedfile.filter_absent(stream),
        observed.add, parent_map)
    # Only the present keys survive the filter.
    self.assertEqual(set(present_keys), observed)
|
1746  | 
||
1747  | 
def get_mapper(self):
    """Get a mapper suitable for the key length of the test interface."""
    # Single-element keys all live in one file; longer keys are
    # partitioned by an escaped hash prefix.
    if self.key_length == 1:
        return ConstantMapper('source')
    return HashEscapedPrefixMapper()
|
1753  | 
||
1754  | 
def get_parents(self, parents):
    """Get parents, taking self.graph into consideration."""
    # Graph-less stores take no parent information at all.
    if not self.graph:
        return None
    return parents
|
1760  | 
||
1761  | 
def test_get_parent_map(self):
    """get_parent_map answers for present keys and omits absent ones."""
    files = self.get_versionedfiles()
    if self.key_length == 1:
        parent_details = [
            (('r0',), self.get_parents(())),
            (('r1',), self.get_parents((('r0',),))),
            (('r2',), self.get_parents(())),
            (('r3',), self.get_parents(())),
            (('m',), self.get_parents((('r0',), ('r1',), ('r2',), ('r3',)))),
            ]
    else:
        parent_details = [
            (('FileA', 'r0'), self.get_parents(())),
            (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
            (('FileA', 'r2'), self.get_parents(())),
            (('FileA', 'r3'), self.get_parents(())),
            (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
            ]
    for key, parents in parent_details:
        files.add_lines(key, parents, [])
        # immediately after adding it should be queryable.
        self.assertEqual({key: parents}, files.get_parent_map([key]))
    # We can ask for an empty set
    self.assertEqual({}, files.get_parent_map([]))
    # We can ask for many keys
    all_parents = dict(parent_details)
    self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
    # Absent keys are just not included in the result.
    keys = all_parents.keys()
    if self.key_length == 1:
        keys.insert(1, ('missing',))
    else:
        keys.insert(1, ('missing', 'missing'))
    # Absent keys are just ignored
    self.assertEqual(all_parents, files.get_parent_map(keys))
|
1797  | 
||
1798  | 
def test_get_sha1s(self):
    """get_sha1s returns the stored sha1 for every requested key."""
    store = self.get_versionedfiles()
    self.get_diamond_files(store)
    if self.key_length == 1:
        keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
    else:
        # ask for shas from different prefixes.
        keys = [
            ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
            ('FileA', 'merged'), ('FileB', 'right'),
            ]
    # Expected sha1s, positionally matching the keys above.
    expected_shas = [
        '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
        '00e364d235126be43292ab09cb4686cf703ddc17',
        'a8478686da38e370e32e42e8a0c220e33ee9132f',
        'ed8bce375198ea62444dc71952b22cfc2b09226d',
        '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
        ]
    self.assertEqual(dict(zip(keys, expected_shas)),
        store.get_sha1s(keys))
1817  | 
||
1818  | 
def test_insert_record_stream_empty(self):
    """Inserting an empty record stream should work."""
    store = self.get_versionedfiles()
    store.insert_record_stream([])
|
1822  | 
||
1823  | 
def assertIdenticalVersionedFile(self, expected, actual):
    """Assert that left and right have the same contents."""
    self.assertEqual(set(actual.keys()), set(expected.keys()))
    actual_parents = actual.get_parent_map(actual.keys())
    if self.graph:
        self.assertEqual(actual_parents,
            expected.get_parent_map(expected.keys()))
    else:
        # Graph-less stores record no parents at all.
        for key, parents in actual_parents.items():
            self.assertEqual(None, parents)

    def fulltext_of(store, key):
        # Retrieve one key's text via the record stream interface.
        return store.get_record_stream(
            [key], 'unordered', True).next().get_bytes_as('fulltext')

    for key in actual.keys():
        self.assertEqual(fulltext_of(actual, key),
            fulltext_of(expected, key))
|
1838  | 
||
1839  | 
def test_insert_record_stream_fulltexts(self):
    """Any file should accept a stream of fulltexts."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # weaves always output fulltexts.
    source = make_versioned_files_factory(WeaveFile, mapper)(
        source_transport)
    # Use the default trailing-eol content here: the no-eol variant is
    # covered by test_insert_record_stream_fulltexts_noeol. (Previously
    # both tests passed trailing_eol=False and were exact duplicates.)
    self.get_diamond_files(source)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
|
1853  | 
||
1854  | 
def test_insert_record_stream_fulltexts_noeol(self):
    """Any file should accept a stream of fulltexts."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # weaves always output fulltexts.
    weave_factory = make_versioned_files_factory(WeaveFile, mapper)
    source = weave_factory(source_transport)
    # Texts without trailing newlines exercise the no-eol handling.
    self.get_diamond_files(source, trailing_eol=False)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
|
1868  | 
||
1869  | 
def test_insert_record_stream_annotated_knits(self):
    """Any file should accept a stream from annotated knits."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # True selects the annotated knit factory.
    source = make_file_factory(True, mapper)(source_transport)
    self.get_diamond_files(source)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
|
1881  | 
||
1882  | 
def test_insert_record_stream_annotated_knits_noeol(self):
    """Any file should accept a no-eol stream from annotated knits."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # True selects the annotated knit factory.
    source = make_file_factory(True, mapper)(source_transport)
    # Texts without trailing newlines exercise the no-eol handling.
    self.get_diamond_files(source, trailing_eol=False)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
|
1894  | 
||
1895  | 
def test_insert_record_stream_plain_knits(self):
    """Any file should accept a stream from plain knits."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # False selects the plain (unannotated) knit factory.
    source = make_file_factory(False, mapper)(source_transport)
    self.get_diamond_files(source)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
|
1907  | 
||
1908  | 
def test_insert_record_stream_plain_knits_noeol(self):  | 
|
1909  | 
"""Any file should accept a stream from plain knits."""  | 
|
1910  | 
files = self.get_versionedfiles()  | 
|
1911  | 
mapper = self.get_mapper()  | 
|
1912  | 
source_transport = self.get_transport('source')  | 
|
1913  | 
source_transport.mkdir('.')  | 
|
1914  | 
source = make_file_factory(False, mapper)(source_transport)  | 
|
1915  | 
self.get_diamond_files(source, trailing_eol=False)  | 
|
1916  | 
stream = source.get_record_stream(source.keys(), 'topological',  | 
|
1917  | 
False)  | 
|
1918  | 
files.insert_record_stream(stream)  | 
|
1919  | 
self.assertIdenticalVersionedFile(source, files)  | 
|
1920  | 
||
1921  | 
def test_insert_record_stream_existing_keys(self):  | 
|
1922  | 
"""Inserting keys already in a file should not error."""  | 
|
1923  | 
files = self.get_versionedfiles()  | 
|
1924  | 
source = self.get_versionedfiles('source')  | 
|
1925  | 
self.get_diamond_files(source)  | 
|
1926  | 
        # insert some keys into f.
 | 
|
1927  | 
self.get_diamond_files(files, left_only=True)  | 
|
1928  | 
stream = source.get_record_stream(source.keys(), 'topological',  | 
|
1929  | 
False)  | 
|
1930  | 
files.insert_record_stream(stream)  | 
|
1931  | 
self.assertIdenticalVersionedFile(source, files)  | 
|
1932  | 
||
1933  | 
def test_insert_record_stream_missing_keys(self):  | 
|
1934  | 
"""Inserting a stream with absent keys should raise an error."""  | 
|
1935  | 
files = self.get_versionedfiles()  | 
|
1936  | 
source = self.get_versionedfiles('source')  | 
|
1937  | 
stream = source.get_record_stream([('missing',) * self.key_length],  | 
|
1938  | 
'topological', False)  | 
|
1939  | 
self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,  | 
|
1940  | 
stream)  | 
|
1941  | 
||
1942  | 
def test_insert_record_stream_out_of_order(self):  | 
|
1943  | 
"""An out of order stream can either error or work."""  | 
|
1944  | 
files = self.get_versionedfiles()  | 
|
1945  | 
source = self.get_versionedfiles('source')  | 
|
1946  | 
self.get_diamond_files(source)  | 
|
1947  | 
if self.key_length == 1:  | 
|
1948  | 
origin_keys = [('origin',)]  | 
|
1949  | 
end_keys = [('merged',), ('left',)]  | 
|
1950  | 
start_keys = [('right',), ('base',)]  | 
|
1951  | 
else:  | 
|
1952  | 
origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]  | 
|
1953  | 
end_keys = [('FileA', 'merged',), ('FileA', 'left',),  | 
|
1954  | 
('FileB', 'merged',), ('FileB', 'left',)]  | 
|
1955  | 
start_keys = [('FileA', 'right',), ('FileA', 'base',),  | 
|
1956  | 
('FileB', 'right',), ('FileB', 'base',)]  | 
|
1957  | 
origin_entries = source.get_record_stream(origin_keys, 'unordered', False)  | 
|
1958  | 
end_entries = source.get_record_stream(end_keys, 'topological', False)  | 
|
1959  | 
start_entries = source.get_record_stream(start_keys, 'topological', False)  | 
|
1960  | 
entries = chain(origin_entries, end_entries, start_entries)  | 
|
1961  | 
try:  | 
|
1962  | 
files.insert_record_stream(entries)  | 
|
1963  | 
except RevisionNotPresent:  | 
|
1964  | 
            # Must not have corrupted the file.
 | 
|
1965  | 
files.check()  | 
|
1966  | 
else:  | 
|
1967  | 
self.assertIdenticalVersionedFile(source, files)  | 
|
1968  | 
||
1969  | 
def test_insert_record_stream_delta_missing_basis_no_corruption(self):  | 
|
1970  | 
"""Insertion where a needed basis is not included aborts safely."""  | 
|
1971  | 
        # We use a knit always here to be sure we are getting a binary delta.
 | 
|
1972  | 
mapper = self.get_mapper()  | 
|
1973  | 
source_transport = self.get_transport('source')  | 
|
1974  | 
source_transport.mkdir('.')  | 
|
1975  | 
source = make_file_factory(False, mapper)(source_transport)  | 
|
1976  | 
self.get_diamond_files(source)  | 
|
1977  | 
entries = source.get_record_stream(['origin', 'merged'], 'unordered', False)  | 
|
1978  | 
files = self.get_versionedfiles()  | 
|
1979  | 
self.assertRaises(RevisionNotPresent, files.insert_record_stream,  | 
|
1980  | 
entries)  | 
|
1981  | 
files.check()  | 
|
1982  | 
self.assertEqual({}, files.get_parent_map([]))  | 
|
1983  | 
||
1984  | 
    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equal set of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        # Records every pb.update() call so the progress reporting of
        # iter_lines_added_or_present_in_keys can be asserted on below.
        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):

                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])
        # Helper: iterate the requested keys, counting how often each
        # (line, key) pair is yielded, and check the progress updates
        # (only when the implementation reported any at all).
        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if []!= progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content.', 0, 2),
             ('Walking content.', 1, 2),
             ('Walking content.', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we dont care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content.', 0, 5),
             ('Walking content.', 1, 5),
             ('Walking content.', 2, 5),
             ('Walking content.', 3, 5),
             ('Walking content.', 4, 5),
             ('Walking content.', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
|
2057  | 
||
2058  | 
    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
                ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
                ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        # NOTE(review): sha1s is never referenced anywhere below - it looks
        # like a leftover fixture table and is probably dead code.
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        # First chain: 26 texts, each one line longer than its parent.
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        # Second, parallel chain rooted at the same base.
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        # Round-trip every text through make_mpdiffs/add_mpdiffs (parents
        # first, so each diff's basis is already in the target) and check
        # the reconstructed fulltext matches the original.
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext')
                )
 | 
|
2150  | 
||
2151  | 
def test_keys(self):  | 
|
2152  | 
        # While use is discouraged, versions() is still needed by aspects of
 | 
|
2153  | 
        # bzr.
 | 
|
2154  | 
files = self.get_versionedfiles()  | 
|
2155  | 
self.assertEqual(set(), set(files.keys()))  | 
|
2156  | 
if self.key_length == 1:  | 
|
2157  | 
key = ('foo',)  | 
|
2158  | 
else:  | 
|
2159  | 
key = ('foo', 'bar',)  | 
|
2160  | 
files.add_lines(key, (), [])  | 
|
2161  | 
self.assertEqual(set([key]), set(files.keys()))  | 
|
| 
3518.1.1
by Jelmer Vernooij
 Add VirtualVersionedFiles class.  | 
2162  | 
|
2163  | 
||
2164  | 
class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        # Parent-map callback handed to VirtualVersionedFiles: answer only
        # for the keys this test has registered in self._parent_map.
        known = self._parent_map
        return dict((k, known[k]) for k in keys if k in known)

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        # Virtual files are read-only: add_lines is unsupported.
        self.assertRaises(NotImplementedError,
                          self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        # Virtual files are read-only: add_mpdiffs is unsupported.
        self.assertRaises(NotImplementedError,
                          self.texts.add_mpdiffs, [])

    def test_check(self):
        # There is nothing to verify, so check() simply reports success.
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        # Virtual files are read-only: insert_record_stream is unsupported.
        self.assertRaises(NotImplementedError,
                          self.texts.insert_record_stream, [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
                          self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        # Unknown keys are simply omitted from the result.
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",), ("B",))},
                          self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("fulltext", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))

    def test_get_record_stream_absent(self):
        # Keys with no backing lines come back as absent records.
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)
|
2220  |